ansible-playbook [core 2.17.12]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-lTb
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.11 (main, Jun 4 2025, 00:00:00) [GCC 14.2.1 20250110 (Red Hat 14.2.1-8)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_pod.yml ************************************************
2 plays in /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:5
Monday 07 July 2025 20:15:05 -0400 (0:00:00.020) 0:00:00.020 ***********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-qjs/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Ensure that the role can manage quadlet pods] ****************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
Monday 07 July 2025 20:15:05 -0400 (0:00:00.020) 0:00:00.041 ***********
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node2]
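For context before the role runs in the "Run the role - root" task below: this test passes quadlet specifications for one pod and one container to fedora.linux_system_roles.podman. The sketch below is reconstructed from the per-container variables logged later in this run (the Pod and Container spec contents, the generated file /etc/containers/systemd/quadlet-pod-pod.pod, and the service name quadlet-pod-pod-pod.service); the surrounding YAML structure and the container file name noted in the comments are an approximation, not the verbatim content of tests_quadlet_pod.yml.

# Approximate invocation, reconstructed from this log (not the verbatim test file)
- name: Run the role - root
  include_role:
    name: fedora.linux_system_roles.podman
  vars:
    podman_quadlet_specs:
      # Rendered by the role to /etc/containers/systemd/quadlet-pod-pod.pod, which
      # systemd's quadlet generator turns into quadlet-pod-pod-pod.service (both
      # paths/names appear later in this log).
      - name: quadlet-pod-pod
        type: pod
        Pod:
          PodName: quadlet-pod
      # Presumably rendered to /etc/containers/systemd/quadlet-pod-container.container,
      # following the same <name>.<type> pattern; that path is not shown in this section.
      - name: quadlet-pod-container
        type: container
        Install:
          WantedBy: default.target
        Container:
          ContainerName: quadlet-pod-container
          Image: quay.io/libpod/testimage:20210610
          Exec: /bin/busybox-extras httpd -f -p 80
          Pod: quadlet-pod-pod.pod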
TASK [Run the role - root] *****************************************************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:34
Monday 07 July 2025 20:15:06 -0400 (0:00:01.170) 0:00:01.211 ***********
included: fedora.linux_system_roles.podman for managed-node2

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:15:06 -0400 (0:00:00.052) 0:00:01.264 ***********
included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:15:06 -0400 (0:00:00.023) 0:00:01.287 ***********
skipping: [managed-node2] => {
    "changed": false,
    "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0",
    "skip_reason": "Conditional result was False"
}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:15:06 -0400 (0:00:00.035) 0:00:01.322 ***********
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:15:07 -0400 (0:00:00.447) 0:00:01.769 ***********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_is_ostree": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:15:07 -0400 (0:00:00.022) 0:00:01.792 ***********
ok: [managed-node2] => {
    "changed": false,
    "stat": {
        "exists": false
    }
}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:15:07 -0400 (0:00:00.372) 0:00:02.164 ***********
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_is_transactional": false
    },
    "changed": false
}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:15:07 -0400 (0:00:00.021) 0:00:02.186 ***********
ok: [managed-node2] => (item=RedHat.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "podman",
            "shadow-utils-subid"
        ]
    },
    "ansible_included_var_files": [
        "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"
    ],
    "ansible_loop_var": "item",
    "changed": false,
    "item": "RedHat.yml"
}
skipping: [managed-node2] => (item=CentOS.yml) => {
    "ansible_loop_var": "item",
    "changed": false,
    "false_condition": "__vars_file is file",
    "item": "CentOS.yml",
    "skip_reason": "Conditional result was False"
}
ok: [managed-node2] => (item=CentOS_10.yml) => {
    "ansible_facts": {
        "__podman_packages": [
            "iptables-nft",
"podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:15:07 -0400 (0:00:00.040) 0:00:02.227 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:15:08 -0400 (0:00:01.130) 0:00:03.357 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:15:08 -0400 (0:00:00.045) 0:00:03.403 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:15:08 -0400 (0:00:00.048) 0:00:03.452 *********** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:15:08 -0400 (0:00:00.044) 0:00:03.496 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:15:09 -0400 (0:00:00.045) 0:00:03.542 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:15:09 -0400 (0:00:00.046) 0:00:03.589 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025837", "end": "2025-07-07 20:15:09.493316", "rc": 0, "start": "2025-07-07 20:15:09.467479" } STDOUT: 
podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:15:09 -0400 (0:00:00.477) 0:00:04.066 *********** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:15:09 -0400 (0:00:00.030) 0:00:04.096 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:15:09 -0400 (0:00:00.029) 0:00:04.126 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:15:09 -0400 (0:00:00.048) 0:00:04.175 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:15:09 -0400 (0:00:00.050) 0:00:04.225 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:15:09 -0400 (0:00:00.072) 0:00:04.298 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:15:09 -0400 (0:00:00.046) 0:00:04.345 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:09 -0400 (0:00:00.056) 
0:00:04.402 *********** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:10 -0400 (0:00:00.473) 0:00:04.875 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:10 -0400 (0:00:00.032) 0:00:04.908 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:10 -0400 (0:00:00.040) 0:00:04.948 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:10 -0400 (0:00:00.383) 0:00:05.331 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:10 -0400 (0:00:00.030) 0:00:05.362 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:10 -0400 (0:00:00.029) 0:00:05.391 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:10 -0400 (0:00:00.027) 0:00:05.419 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:10 -0400 (0:00:00.028) 0:00:05.448 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:10 -0400 (0:00:00.029) 0:00:05.478 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:11 -0400 (0:00:00.030) 0:00:05.508 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:05.537 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:15:11 -0400 (0:00:00.029) 0:00:05.567 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:15:11 -0400 (0:00:00.064) 0:00:05.631 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:15:11 -0400 (0:00:00.081) 0:00:05.712 *********** skipping: [managed-node2] => { "changed": false, "false_condition": 
"podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:15:11 -0400 (0:00:00.029) 0:00:05.742 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:05.770 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:15:11 -0400 (0:00:00.052) 0:00:05.823 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:15:11 -0400 (0:00:00.027) 0:00:05.851 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:15:11 -0400 (0:00:00.026) 0:00:05.877 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:15:11 -0400 (0:00:00.053) 0:00:05.930 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:05.958 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:15:11 -0400 (0:00:00.027) 0:00:05.986 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** 
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:15:11 -0400 (0:00:00.055) 0:00:06.042 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:15:11 -0400 (0:00:00.027) 0:00:06.069 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:06.098 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:15:11 -0400 (0:00:00.027) 0:00:06.125 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:06.154 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:15:11 -0400 (0:00:00.028) 0:00:06.182 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:15:11 -0400 (0:00:00.027) 0:00:06.209 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:15:11 -0400 (0:00:00.056) 0:00:06.265 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:15:11 -0400 (0:00:00.025) 0:00:06.290 *********** skipping: [managed-node2] => { 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:15:11 -0400 (0:00:00.025) 0:00:06.316 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:15:11 -0400 (0:00:00.024) 0:00:06.340 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:15:11 -0400 (0:00:00.025) 0:00:06.366 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:11 -0400 (0:00:00.079) 0:00:06.445 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:11 -0400 (0:00:00.037) 0:00:06.483 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:12 -0400 (0:00:00.035) 0:00:06.518 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:12 -0400 (0:00:00.028) 0:00:06.547 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** 
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:12 -0400 (0:00:00.043) 0:00:06.591 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:12 -0400 (0:00:00.060) 0:00:06.651 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:12 -0400 (0:00:00.031) 0:00:06.683 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:12 -0400 (0:00:00.034) 0:00:06.717 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:12 -0400 (0:00:00.040) 0:00:06.757 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:12 -0400 (0:00:00.386) 0:00:07.144 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.174 *********** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:12 -0400 (0:00:00.029) 0:00:07.204 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:12 -0400 (0:00:00.062) 0:00:07.267 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.298 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.329 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.359 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.389 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:12 -0400 (0:00:00.030) 0:00:07.420 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 
2025 20:15:12 -0400 (0:00:00.048) 0:00:07.468 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:12 -0400 (0:00:00.031) 0:00:07.499 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:13 -0400 (0:00:00.029) 0:00:07.529 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:13 -0400 (0:00:00.070) 0:00:07.599 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:13 -0400 (0:00:00.035) 0:00:07.634 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:15:13 -0400 (0:00:00.034) 0:00:07.669 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:15:13 -0400 (0:00:00.063) 0:00:07.732 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:15:13 -0400 (0:00:00.049) 0:00:07.782 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:15:13 -0400 (0:00:00.026) 0:00:07.808 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless 
| bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:15:13 -0400 (0:00:00.027) 0:00:07.836 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:15:13 -0400 (0:00:00.028) 0:00:07.864 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:15:13 -0400 (0:00:00.057) 0:00:07.922 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:15:13 -0400 (0:00:00.029) 0:00:07.952 *********** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:15:13 -0400 (0:00:00.479) 0:00:08.431 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:15:13 -0400 (0:00:00.031) 0:00:08.463 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:15:13 -0400 (0:00:00.030) 0:00:08.493 *********** changed: [managed-node2] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/etc/containers/systemd/quadlet-pod-pod.pod", "gid": 0, "group": "root", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1751933714.0353012-19840-138139497644559/.source.pod", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:15:14 -0400 (0:00:00.796) 0:00:09.290 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:15:15 -0400 (0:00:00.914) 0:00:10.205 *********** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket basic.target sysinit.target network-online.target -.mount system.slice", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod 
create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/quadlet-pod-pod-pod.pid --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", "LimitSIGPENDINGSoft": 
"13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3071750144", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": 
"[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:15:16 -0400 (0:00:00.858) 0:00:11.064 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:16 -0400 (0:00:00.030) 0:00:11.095 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:16 -0400 (0:00:00.038) 0:00:11.133 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:16 -0400 (0:00:00.036) 0:00:11.170 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:16 -0400 
(0:00:00.027) 0:00:11.198 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:16 -0400 (0:00:00.044) 0:00:11.242 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:16 -0400 (0:00:00.054) 0:00:11.296 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:16 -0400 (0:00:00.032) 0:00:11.328 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:16 -0400 (0:00:00.033) 0:00:11.362 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:16 -0400 (0:00:00.040) 0:00:11.403 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:17 -0400 (0:00:00.385) 0:00:11.788 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:17 -0400 (0:00:00.063) 0:00:11.851 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:11.882 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:11.912 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:17 -0400 (0:00:00.029) 0:00:11.942 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:17 -0400 (0:00:00.029) 0:00:11.972 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:17 -0400 (0:00:00.029) 0:00:12.001 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:17 -0400 (0:00:00.029) 0:00:12.031 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:17 -0400 (0:00:00.029) 0:00:12.061 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": 
"quadlet-pod-container.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:17 -0400 (0:00:00.048) 0:00:12.109 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:17 -0400 (0:00:00.031) 0:00:12.141 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:12.171 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:17 -0400 (0:00:00.072) 0:00:12.243 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:17 -0400 (0:00:00.035) 0:00:12.279 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:15:17 -0400 (0:00:00.028) 0:00:12.307 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:15:17 -0400 (0:00:00.061) 0:00:12.368 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:15:17 -0400 (0:00:00.048) 0:00:12.417 *********** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:15:17 -0400 (0:00:00.027) 0:00:12.445 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:15:17 -0400 (0:00:00.058) 0:00:12.503 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:15:18 -0400 (0:00:00.028) 0:00:12.532 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:15:18 -0400 (0:00:00.026) 0:00:12.558 *********** ok: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:15:19 -0400 (0:00:01.021) 0:00:13.579 *********** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 33, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:15:19 -0400 (0:00:00.388) 0:00:13.968 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:15:19 -0400 (0:00:00.031) 0:00:13.999 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:15:19 -0400 
(0:00:00.031) 0:00:14.031 *********** changed: [managed-node2] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/etc/containers/systemd/quadlet-pod-container.container", "gid": 0, "group": "root", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1751933719.570603-19951-98699047571118/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:15:20 -0400 (0:00:00.747) 0:00:14.778 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:15:21 -0400 (0:00:00.763) 0:00:15.542 *********** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "systemd-journald.socket basic.target quadlet-pod-pod-pod.service sysinit.target -.mount system.slice network-online.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", 
"Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": 
"819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", "LimitSIGPENDINGSoft": "13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3058184192", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "-.mount sysinit.target system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", 
"SourcePath": "/etc/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target quadlet-pod-pod-pod.service", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:15:21 -0400 (0:00:00.691) 0:00:16.233 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Monday 07 July 2025 20:15:21 -0400 (0:00:00.030) 0:00:16.263 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Monday 07 July 2025 20:15:21 -0400 (0:00:00.026) 0:00:16.290 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Monday 07 July 2025 20:15:21 -0400 (0:00:00.029) 0:00:16.319 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40 Monday 07 July 2025 20:15:21 -0400 (0:00:00.058) 0:00:16.378 *********** ok: [managed-node2] 
=> (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:01.003832", "end": "2025-07-07 20:15:23.194884", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-07-07 20:15:22.191052" } STDOUT:
#
# Ansible managed
#
# system_role:podman
[Install]
WantedBy=default.target
[Container]
Image=quay.io/libpod/testimage:20210610
ContainerName=quadlet-pod-container
Pod=quadlet-pod-pod.pod
Exec=/bin/busybox-extras httpd -f -p 80
ok: [managed-node2] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/etc/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:01.003745", "end": "2025-07-07 20:15:24.544251", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-07-07 20:15:23.540506" } STDOUT:
#
# Ansible managed
#
# system_role:podman
[Pod]
PodName=quadlet-pod
TASK [Check pod] *************************************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:49 Monday 07 July 2025 20:15:24 -0400 (0:00:02.740) 0:00:19.119 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.035597", "end": "2025-07-07 20:15:24.954918", "failed_when_result": false, "rc": 0, "start": "2025-07-07 20:15:24.919321" } STDOUT:
quadlet-pod-infra
quadlet-pod-container
TASK [Create user for testing] ************************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:57 Monday 07 July 2025 20:15:25 -0400 (0:00:00.415) 0:00:19.535 *********** changed: [managed-node2] => { "changed": true, "comment": "", "create_home": true, "group": 2223, "home": "/home/user_quadlet_pod", "name": "user_quadlet_pod", "shell": "/bin/bash", "state": "present", "system": false, "uid": 2223 } TASK [Run the role - user] ***************************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:62 Monday 07 July 2025 20:15:25 -0400 (0:00:00.602) 0:00:20.138 *********** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:15:25 -0400 (0:00:00.175) 0:00:20.313 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:15:25 -0400 (0:00:00.053) 0:00:20.366 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:15:25 -0400 (0:00:00.035) 0:00:20.401 *********** skipping: [managed-node2] => {
"changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:15:25 -0400 (0:00:00.027) 0:00:20.429 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:15:25 -0400 (0:00:00.028) 0:00:20.458 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:15:25 -0400 (0:00:00.029) 0:00:20.488 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:15:26 -0400 (0:00:00.027) 0:00:20.515 *********** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:15:26 -0400 (0:00:00.063) 0:00:20.578 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 
20:15:27 -0400 (0:00:00.936) 0:00:21.515 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:15:27 -0400 (0:00:00.029) 0:00:21.544 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:15:27 -0400 (0:00:00.034) 0:00:21.578 *********** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:15:27 -0400 (0:00:00.029) 0:00:21.608 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:15:27 -0400 (0:00:00.031) 0:00:21.640 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:15:27 -0400 (0:00:00.041) 0:00:21.681 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.023491", "end": "2025-07-07 20:15:27.522745", "rc": 0, "start": "2025-07-07 20:15:27.499254" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:15:27 -0400 (0:00:00.415) 0:00:22.096 *********** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:15:27 -0400 (0:00:00.081) 0:00:22.177 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:15:27 -0400 (0:00:00.031) 0:00:22.209 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:15:27 -0400 (0:00:00.035) 0:00:22.245 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:15:27 -0400 (0:00:00.043) 0:00:22.289 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:15:27 -0400 (0:00:00.049) 0:00:22.338 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:15:27 -0400 (0:00:00.047) 0:00:22.385 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:27 -0400 (0:00:00.054) 0:00:22.439 *********** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "user_quadlet_pod": [ "x", "2223", "2223", "", "/home/user_quadlet_pod", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:28 -0400 (0:00:00.383) 0:00:22.822 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:28 -0400 (0:00:00.038) 0:00:22.861 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 
07 July 2025 20:15:28 -0400 (0:00:00.041) 0:00:22.902 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:28 -0400 (0:00:00.386) 0:00:23.289 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003884", "end": "2025-07-07 20:15:29.114222", "rc": 0, "start": "2025-07-07 20:15:29.110338" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:29 -0400 (0:00:00.419) 0:00:23.708 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005022", "end": "2025-07-07 20:15:29.541917", "rc": 0, "start": "2025-07-07 20:15:29.536895" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:29 -0400 (0:00:00.403) 0:00:24.111 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:29 -0400 (0:00:00.045) 0:00:24.157 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:29 -0400 (0:00:00.029) 0:00:24.187 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:29 -0400 (0:00:00.029) 0:00:24.216 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:29 -0400 (0:00:00.029) 0:00:24.246 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:29 -0400 (0:00:00.061) 0:00:24.307 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:15:29 -0400 (0:00:00.031) 0:00:24.339 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/root/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0700", "__podman_parent_path": "/root/.config/containers", "__podman_policy_json_file": "/root/.config/containers/policy.json", "__podman_registries_conf_file": "/root/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/root/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:15:29 -0400 (0:00:00.038) 0:00:24.378 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:15:29 -0400 (0:00:00.053) 0:00:24.432 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:15:29 -0400 (0:00:00.029) 0:00:24.461 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:15:29 -0400 (0:00:00.029) 0:00:24.490 *********** included: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:15:30 -0400 (0:00:00.053) 0:00:24.543 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:15:30 -0400 (0:00:00.029) 0:00:24.572 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:15:30 -0400 (0:00:00.029) 0:00:24.601 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:15:30 -0400 (0:00:00.055) 0:00:24.657 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:15:30 -0400 (0:00:00.042) 0:00:24.700 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:15:30 -0400 (0:00:00.033) 0:00:24.733 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:15:30 -0400 (0:00:00.097) 0:00:24.831 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:15:30 -0400 (0:00:00.049) 0:00:24.880 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:15:30 -0400 (0:00:00.094) 0:00:24.975 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:15:30 -0400 (0:00:00.051) 0:00:25.026 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:15:30 -0400 (0:00:00.049) 0:00:25.075 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:15:30 -0400 (0:00:00.049) 0:00:25.125 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:15:30 -0400 (0:00:00.039) 0:00:25.165 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:15:30 -0400 (0:00:00.032) 0:00:25.198 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:15:30 -0400 (0:00:00.031) 0:00:25.229 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:15:30 -0400 (0:00:00.033) 0:00:25.262 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:15:30 -0400 
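The second invocation below exercises the same quadlet pod rootless, as user_quadlet_pod (__podman_rootless: true in the facts that follow). A minimal sketch of the extra input that selects the target user, assuming the role's podman_run_as_user variable; the subuid/subgid ranges reported by getsubids above (user_quadlet_pod 589824 65536) are a prerequisite for this rootless run:

    podman_run_as_user: user_quadlet_pod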
(0:00:00.026) 0:00:25.289 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:15:30 -0400 (0:00:00.028) 0:00:25.317 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:30 -0400 (0:00:00.078) 0:00:25.395 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Pod": { "PodName": "quadlet-pod" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:30 -0400 (0:00:00.037) 0:00:25.433 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:30 -0400 (0:00:00.036) 0:00:25.469 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:30 -0400 (0:00:00.030) 0:00:25.499 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-pod", "__podman_quadlet_type": "pod", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:31 -0400 (0:00:00.057) 0:00:25.557 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:31 -0400 (0:00:00.091) 0:00:25.648 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in 
ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:31 -0400 (0:00:00.056) 0:00:25.705 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:31 -0400 (0:00:00.103) 0:00:25.808 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:31 -0400 (0:00:00.067) 0:00:25.876 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:31 -0400 (0:00:00.417) 0:00:26.294 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003721", "end": "2025-07-07 20:15:32.101268", "rc": 0, "start": "2025-07-07 20:15:32.097547" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:32 -0400 (0:00:00.397) 0:00:26.691 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005307", "end": "2025-07-07 20:15:32.531947", "rc": 0, "start": "2025-07-07 20:15:32.526640" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:32 -0400 (0:00:00.431) 0:00:27.123 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 
65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:32 -0400 (0:00:00.073) 0:00:27.196 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:32 -0400 (0:00:00.051) 0:00:27.248 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:32 -0400 (0:00:00.052) 0:00:27.300 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:32 -0400 (0:00:00.051) 0:00:27.352 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:32 -0400 (0:00:00.054) 0:00:27.406 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:32 -0400 (0:00:00.052) 0:00:27.459 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-pod-pod.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:33 -0400 (0:00:00.084) 0:00:27.544 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:33 -0400 (0:00:00.053) 0:00:27.597 *********** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:33 -0400 (0:00:00.052) 0:00:27.650 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:33 -0400 (0:00:00.117) 0:00:27.768 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:33 -0400 (0:00:00.058) 0:00:27.827 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:15:33 -0400 (0:00:00.045) 0:00:27.872 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:15:33 -0400 (0:00:00.117) 0:00:27.990 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:15:33 -0400 (0:00:00.123) 0:00:28.113 *********** changed: [managed-node2] => { "changed": true, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": "0:00:00.014154", "end": "2025-07-07 20:15:33.959641", "rc": 0, "start": "2025-07-07 20:15:33.945487" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:15:34 -0400 (0:00:00.441) 0:00:28.555 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:15:34 -0400 (0:00:00.037) 0:00:28.592 *********** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:15:34 -0400 (0:00:00.032) 0:00:28.624 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:15:34 -0400 (0:00:00.025) 0:00:28.650 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:15:34 -0400 (0:00:00.027) 0:00:28.678 *********** changed: [managed-node2] => { "changed": true, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 6, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:15:34 -0400 (0:00:00.384) 0:00:29.063 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:15:34 -0400 (0:00:00.032) 0:00:29.095 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:15:34 -0400 (0:00:00.031) 0:00:29.127 *********** changed: [managed-node2] => { "changed": true, "checksum": "1884c880482430d8bf2e944b003734fb8b7a462d", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "43c9e9c2ff3ad9cd27c1f2d12f03aee0", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1751933734.666246-20447-234683972003494/.source.pod", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:15:35 -0400 (0:00:00.733) 0:00:29.861 *********** [WARNING]: Module remote_tmp /home/user_quadlet_pod/.ansible/tmp 
did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:15:36 -0400 (0:00:00.652) 0:00:30.513 *********** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-pod-pod.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount run-user-2223.mount app.slice basic.target podman-user-wait-network-online.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-pod-pod-pod.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod start --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPre": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod 
create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartPreEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod create --infra-conmon-pidfile=/run/user/2223/quadlet-pod-pod-pod.pid --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod stop --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --time=10 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman pod rm --pod-id-file=/run/user/2223/quadlet-pod-pod-pod.pod-id --ignore --force ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-pod-pod.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-pod-pod.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": 
"infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", "LimitSIGPENDINGSoft": "13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3625492480", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-pod-pod.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "PIDFile": "/run/user/2223/quadlet-pod-pod-pod.pid", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "basic.target app.slice", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "on-failure", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": 
"5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-pod-pod", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "forking", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:15:37 -0400 (0:00:00.993) 0:00:31.507 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:37 -0400 (0:00:00.031) 0:00:31.538 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:37 -0400 (0:00:00.037) 0:00:31.576 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:37 -0400 (0:00:00.035) 0:00:31.612 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } 
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:37 -0400 (0:00:00.032) 0:00:31.644 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:37 -0400 (0:00:00.075) 0:00:31.719 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:37 -0400 (0:00:00.091) 0:00:31.811 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:37 -0400 (0:00:00.108) 0:00:31.920 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:37 -0400 (0:00:00.058) 0:00:31.978 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:37 -0400 (0:00:00.070) 0:00:32.049 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 
20:15:37 -0400 (0:00:00.421) 0:00:32.470 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003469", "end": "2025-07-07 20:15:38.302814", "rc": 0, "start": "2025-07-07 20:15:38.299345" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:38 -0400 (0:00:00.423) 0:00:32.894 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005036", "end": "2025-07-07 20:15:38.744261", "rc": 0, "start": "2025-07-07 20:15:38.739225" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:38 -0400 (0:00:00.422) 0:00:33.316 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:38 -0400 (0:00:00.048) 0:00:33.365 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:38 -0400 (0:00:00.031) 0:00:33.396 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:38 -0400 (0:00:00.029) 0:00:33.426 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:38 -0400 (0:00:00.030) 0:00:33.456 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:38 -0400 (0:00:00.029) 0:00:33.485 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set 
per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:39 -0400 (0:00:00.029) 0:00:33.515 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:39 -0400 (0:00:00.052) 0:00:33.568 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:39 -0400 (0:00:00.050) 0:00:33.618 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:39 -0400 (0:00:00.054) 0:00:33.673 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:39 -0400 (0:00:00.119) 0:00:33.792 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:39 -0400 (0:00:00.057) 0:00:33.850 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:15:39 -0400 (0:00:00.047) 0:00:33.897 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:15:39 -0400 
(0:00:00.113) 0:00:34.011 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:15:39 -0400 (0:00:00.138) 0:00:34.149 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "loginctl", "enable-linger", "user_quadlet_pod" ], "delta": null, "end": null, "rc": 0, "start": null } STDOUT: skipped, since /var/lib/systemd/linger/user_quadlet_pod exists MSG: Did not run command since '/var/lib/systemd/linger/user_quadlet_pod' exists TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:15:40 -0400 (0:00:00.418) 0:00:34.568 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:15:40 -0400 (0:00:00.053) 0:00:34.621 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_item_state | d('present') == 'absent'", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:15:40 -0400 (0:00:00.046) 0:00:34.667 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:15:40 -0400 (0:00:00.033) 0:00:34.701 *********** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:15:41 -0400 (0:00:01.343) 0:00:36.045 *********** ok: [managed-node2] => { "changed": false, "gid": 2223, "group": "user_quadlet_pod", "mode": "0755", "owner": "user_quadlet_pod", "path": "/home/user_quadlet_pod/.config/containers/systemd", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 33, "state": "directory", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:15:41 -0400 (0:00:00.409) 0:00:36.454 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 
0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:15:42 -0400 (0:00:00.053) 0:00:36.508 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 Monday 07 July 2025 20:15:42 -0400 (0:00:00.050) 0:00:36.559 *********** changed: [managed-node2] => { "changed": true, "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "dest": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "gid": 2223, "group": "user_quadlet_pod", "md5sum": "daaf6e904ff3c17edeb801084cfe256f", "mode": "0644", "owner": "user_quadlet_pod", "secontext": "unconfined_u:object_r:config_home_t:s0", "size": 230, "src": "/root/.ansible/tmp/ansible-tmp-1751933742.1096382-20721-81203128614920/.source.container", "state": "file", "uid": 2223 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:15:42 -0400 (0:00:00.779) 0:00:37.338 *********** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:15:43 -0400 (0:00:00.691) 0:00:38.030 *********** changed: [managed-node2] => { "changed": true, "name": "quadlet-pod-container.service", "state": "started", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "run-user-2223.mount quadlet-pod-pod-pod.service podman-user-wait-network-online.service app.slice basic.target -.mount", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "default.target shutdown.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override 
cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": 
"no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", "LimitSIGPENDINGSoft": "13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3588067328", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": 
"/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:15:44 -0400 (0:00:00.748) 0:00:38.778 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Monday 07 July 2025 20:15:44 -0400 (0:00:00.031) 0:00:38.810 *********** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Monday 07 July 2025 20:15:44 -0400 (0:00:00.025) 0:00:38.836 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Monday 07 July 2025 20:15:44 -0400 (0:00:00.026) 0:00:38.862 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check files] ************************************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70 Monday 07 July 2025 20:15:44 -0400 (0:00:00.041) 0:00:38.904 *********** ok: [managed-node2] => (item=quadlet-pod-container.container) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container" ], "delta": "0:00:00.002761", "end": "2025-07-07 20:15:44.708414", "item": "quadlet-pod-container.container", "rc": 0, "start": "2025-07-07 20:15:44.705653" } STDOUT: # # Ansible managed # # system_role:podman [Install] WantedBy=default.target [Container] Image=quay.io/libpod/testimage:20210610 ContainerName=quadlet-pod-container Pod=quadlet-pod-pod.pod Exec=/bin/busybox-extras httpd -f -p 80 ok: [managed-node2] => (item=quadlet-pod-pod.pod) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "cat", "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod" ], "delta": "0:00:00.002961", "end": "2025-07-07 20:15:45.056203", "item": "quadlet-pod-pod.pod", "rc": 0, "start": "2025-07-07 20:15:45.053242" } STDOUT: # # Ansible managed # # system_role:podman [Pod] PodName=quadlet-pod TASK [Check pod] *************************************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:79 Monday 07 July 2025 20:15:45 -0400 (0:00:00.734) 0:00:39.638 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "inspect", "quadlet-pod", "--format", "{{range .Containers}}{{.Name}}\n{{end}}" ], "delta": "0:00:00.051567", "end": "2025-07-07 20:15:45.580337", "failed_when_result": false, "rc": 0, "start": "2025-07-07 20:15:45.528770" } STDOUT: quadlet-pod-infra quadlet-pod-container TASK [Ensure linger] *********************************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:91 Monday 07 July 2025 20:15:45 -0400 (0:00:00.591) 0:00:40.230 *********** ok: [managed-node2] => { "changed": false, "failed_when_result": false, "stat": { "atime": 1751933733.9501154, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709", "ctime": 1751933733.9501154, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 4595894, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "inode/x-empty", "mode": "0644", "mtime": 1751933733.9501154, 
"nlink": 1, "path": "/var/lib/systemd/linger/user_quadlet_pod", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 0, "uid": 0, "version": "3797699622", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [Cleanup user] ************************************************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:99 Monday 07 July 2025 20:15:46 -0400 (0:00:00.383) 0:00:40.613 *********** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:15:46 -0400 (0:00:00.103) 0:00:40.717 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:15:46 -0400 (0:00:00.049) 0:00:40.766 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:15:46 -0400 (0:00:00.035) 0:00:40.802 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:15:46 -0400 (0:00:00.029) 0:00:40.832 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:15:46 -0400 (0:00:00.028) 0:00:40.860 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:15:46 -0400 (0:00:00.029) 0:00:40.890 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:15:46 -0400 (0:00:00.027) 0:00:40.918 *********** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { 
"__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:15:46 -0400 (0:00:00.065) 0:00:40.983 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:15:47 -0400 (0:00:00.943) 0:00:41.927 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:15:47 -0400 (0:00:00.030) 0:00:41.958 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:15:47 -0400 (0:00:00.034) 0:00:41.992 *********** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:15:47 -0400 (0:00:00.064) 0:00:42.056 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:15:47 -0400 (0:00:00.028) 0:00:42.085 *********** skipping: [managed-node2] => { 
"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:15:47 -0400 (0:00:00.028) 0:00:42.113 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024385", "end": "2025-07-07 20:15:47.944465", "rc": 0, "start": "2025-07-07 20:15:47.920080" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:15:48 -0400 (0:00:00.401) 0:00:42.515 *********** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:15:48 -0400 (0:00:00.030) 0:00:42.546 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:15:48 -0400 (0:00:00.027) 0:00:42.573 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:15:48 -0400 (0:00:00.033) 0:00:42.607 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:15:48 -0400 (0:00:00.038) 0:00:42.645 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:15:48 -0400 (0:00:00.051) 0:00:42.697 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group 
information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:15:48 -0400 (0:00:00.052) 0:00:42.749 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:48 -0400 (0:00:00.055) 0:00:42.805 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:48 -0400 (0:00:00.047) 0:00:42.852 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:48 -0400 (0:00:00.043) 0:00:42.895 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:48 -0400 (0:00:00.037) 0:00:42.933 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:48 -0400 (0:00:00.377) 0:00:43.311 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.004076", "end": "2025-07-07 20:15:49.118607", "rc": 0, "start": "2025-07-07 20:15:49.114531" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:49 -0400 
(0:00:00.375) 0:00:43.687 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.004684", "end": "2025-07-07 20:15:49.498026", "rc": 0, "start": "2025-07-07 20:15:49.493342" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:49 -0400 (0:00:00.416) 0:00:44.103 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:49 -0400 (0:00:00.046) 0:00:44.150 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:49 -0400 (0:00:00.030) 0:00:44.181 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:49 -0400 (0:00:00.030) 0:00:44.211 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:49 -0400 (0:00:00.035) 0:00:44.247 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:49 -0400 (0:00:00.030) 0:00:44.277 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:15:49 -0400 (0:00:00.028) 0:00:44.306 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0700", "__podman_parent_path": "/home/user_quadlet_pod/.config/containers", "__podman_policy_json_file": 
"/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:15:49 -0400 (0:00:00.038) 0:00:44.345 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:15:49 -0400 (0:00:00.051) 0:00:44.397 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:15:49 -0400 (0:00:00.028) 0:00:44.425 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:15:49 -0400 (0:00:00.028) 0:00:44.453 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:15:50 -0400 (0:00:00.052) 0:00:44.505 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:15:50 -0400 (0:00:00.029) 0:00:44.534 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:15:50 -0400 (0:00:00.028) 0:00:44.563 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:15:50 -0400 (0:00:00.052) 0:00:44.615 *********** skipping: [managed-node2] => 
{ "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:15:50 -0400 (0:00:00.027) 0:00:44.643 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:15:50 -0400 (0:00:00.063) 0:00:44.706 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:15:50 -0400 (0:00:00.059) 0:00:44.766 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:15:50 -0400 (0:00:00.030) 0:00:44.796 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:15:50 -0400 (0:00:00.029) 0:00:44.825 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:15:50 -0400 (0:00:00.028) 0:00:44.854 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:15:50 -0400 (0:00:00.028) 0:00:44.882 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:15:50 -0400 (0:00:00.028) 0:00:44.910 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of 
users that need to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:15:50 -0400 (0:00:00.028) 0:00:44.939 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:15:50 -0400 (0:00:00.027) 0:00:44.966 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:15:50 -0400 (0:00:00.025) 0:00:44.992 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:15:50 -0400 (0:00:00.024) 0:00:45.017 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:15:50 -0400 (0:00:00.026) 0:00:45.043 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:15:50 -0400 (0:00:00.024) 0:00:45.067 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:50 -0400 (0:00:00.082) 0:00:45.150 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:50 -0400 
(0:00:00.037) 0:00:45.187 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:50 -0400 (0:00:00.036) 0:00:45.223 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:50 -0400 (0:00:00.033) 0:00:45.256 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:50 -0400 (0:00:00.075) 0:00:45.332 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:50 -0400 (0:00:00.055) 0:00:45.387 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:50 -0400 (0:00:00.031) 0:00:45.419 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:50 -0400 (0:00:00.030) 0:00:45.449 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:50 -0400 (0:00:00.040) 0:00:45.490 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": 
false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:51 -0400 (0:00:00.377) 0:00:45.867 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003447", "end": "2025-07-07 20:15:51.673469", "rc": 0, "start": "2025-07-07 20:15:51.670022" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:51 -0400 (0:00:00.374) 0:00:46.242 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005014", "end": "2025-07-07 20:15:52.055512", "rc": 0, "start": "2025-07-07 20:15:52.050498" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:52 -0400 (0:00:00.380) 0:00:46.623 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:52 -0400 (0:00:00.044) 0:00:46.667 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:52 -0400 (0:00:00.029) 0:00:46.697 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:52 -0400 (0:00:00.029) 0:00:46.727 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 
July 2025 20:15:52 -0400 (0:00:00.030) 0:00:46.757 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:52 -0400 (0:00:00.028) 0:00:46.786 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:52 -0400 (0:00:00.030) 0:00:46.816 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:52 -0400 (0:00:00.048) 0:00:46.865 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:52 -0400 (0:00:00.033) 0:00:46.898 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:52 -0400 (0:00:00.027) 0:00:46.925 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:52 -0400 (0:00:00.071) 0:00:46.997 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:52 -0400 (0:00:00.035) 0:00:47.033 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for 
managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 07 July 2025 20:15:52 -0400 (0:00:00.104) 0:00:47.138 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933733.9971163, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751933744.1192975, "dev": 76, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1751933744.1192975, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 220, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 07 July 2025 20:15:53 -0400 (0:00:00.382) 0:00:47.520 *********** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": "unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ActiveEnterTimestampMonotonic": "761486809", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "run-user-2223.mount quadlet-pod-pod-pod.service podman-user-wait-network-online.service app.slice basic.target -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-07-07 20:15:44 EDT", "AssertTimestampMonotonic": "761358250", "Before": "default.target shutdown.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "88476000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ConditionTimestampMonotonic": "761358245", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroup": 
"/user.slice/user-2223.slice/user@2223.service/app.slice/quadlet-pod-container.service", "ControlGroupId": "13011", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "65646", "ExecMainStartTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ExecMainStartTimestampMonotonic": "761440001", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Mon 2025-07-07 20:15:44 EDT] ; stop_time=[n/a] ; pid=65636 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Mon 2025-07-07 20:15:44 EDT] ; stop_time=[n/a] ; pid=65636 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", 
"IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Mon 2025-07-07 20:15:44 EDT", "InactiveExitTimestampMonotonic": "761365330", "InvocationID": "408ca3a662f84f11bc69eeaa02ba23d4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", "LimitSIGPENDINGSoft": "13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "65646", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3587125248", "MemoryCurrent": "905216", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "21110784", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", 
"RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Mon 2025-07-07 20:15:44 EDT", "StateChangeTimestampMonotonic": "761486809", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 Monday 07 July 2025 20:16:03 -0400 (0:00:10.952) 0:00:58.472 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933743.4072847, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1751933742.746273, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 75497741, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": 
"0644", "mtime": 1751933742.4712682, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "446493906", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39 Monday 07 July 2025 20:16:04 -0400 (0:00:00.385) 0:00:58.858 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 07 July 2025 20:16:04 -0400 (0:00:00.052) 0:00:58.910 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 07 July 2025 20:16:04 -0400 (0:00:00.443) 0:00:59.354 *********** fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Debug3] ****************************************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127 Monday 07 July 2025 20:16:04 -0400 (0:00:00.043) 0:00:59.398 *********** ok: [managed-node2] => { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\nsystemctl list-unit-files | grep quadlet\nls -alrtF /etc/containers/systemd\n/usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log\n", "delta": "0:00:00.596685", "end": "2025-07-07 20:16:05.789639", "rc": 0, "start": "2025-07-07 20:16:05.192954" } STDERR: + set -o pipefail + exec + podman volume ls DRIVER VOLUME NAME + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge 51bfc940b19a podman-default-kube-network bridge + podman secret ls ID NAME DRIVER CREATED UPDATED + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES 6195e69957a5 49 seconds ago Up 49 seconds quadlet-pod-infra 39e7825a90b7 quay.io/libpod/testimage:20210610 /bin/busybox-extr... 
43 seconds ago Up 44 seconds quadlet-pod-container
+ podman pod ls
POD ID        NAME         STATUS   CREATED         INFRA ID      # OF CONTAINERS
fad3aff1d9a9  quadlet-pod  Running  49 seconds ago  6195e69957a5  2
+ podman images
REPOSITORY                TAG       IMAGE ID      CREATED      SIZE
quay.io/libpod/testimage  20210610  9f9ec7f2fdef  4 years ago  7.99 MB
+ systemctl list-units
+ grep quadlet
quadlet-pod-container.service loaded active running quadlet-pod-container.service
quadlet-pod-pod-pod.service loaded active running quadlet-pod-pod-pod.service
+ systemctl list-unit-files
+ grep quadlet
quadlet-pod-container.service generated -
quadlet-pod-pod-pod.service generated -
+ ls -alrtF /etc/containers/systemd
total 8
drwxr-xr-x. 9 root root 178 Jul 7 20:10 ../
-rw-r--r--. 1 root root 70 Jul 7 20:15 quadlet-pod-pod.pod
-rw-r--r--. 1 root root 230 Jul 7 20:15 quadlet-pod-container.container
drwxr-xr-x. 2 root root 72 Jul 7 20:15 ./
+ /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log
quadlet-generator[68496]: Loading source unit file /etc/containers/systemd/quadlet-pod-container.container
quadlet-generator[68496]: Loading source unit file /etc/containers/systemd/quadlet-pod-pod.pod
---quadlet-pod-container.service---
#
# Ansible managed
#
# system_role:podman
[Install]
WantedBy=default.target

[X-Container]
Image=quay.io/libpod/testimage:20210610
ContainerName=quadlet-pod-container
Pod=quadlet-pod-pod.pod
Exec=/bin/busybox-extras httpd -f -p 80

[Unit]
Wants=network-online.target
After=network-online.target
SourcePath=/etc/containers/systemd/quadlet-pod-container.container
RequiresMountsFor=%t/containers
BindsTo=quadlet-pod-pod-pod.service
After=quadlet-pod-pod-pod.service

[Service]
Environment=PODMAN_SYSTEMD_UNIT=%n
KillMode=mixed
ExecStop=/usr/bin/podman rm -v -f -i --cidfile=%t/%N.cid
ExecStopPost=-/usr/bin/podman rm -v -f -i --cidfile=%t/%N.cid
Delegate=yes
Type=notify
NotifyAccess=all
SyslogIdentifier=%N
ExecStart=/usr/bin/podman run --name quadlet-pod-container --cidfile=%t/%N.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file %t/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80

---quadlet-pod-pod-pod.service---
#
# Ansible managed
#
# system_role:podman
[X-Pod]
PodName=quadlet-pod

[Unit]
Wants=network-online.target
After=network-online.target
SourcePath=/etc/containers/systemd/quadlet-pod-pod.pod
RequiresMountsFor=%t/containers
Wants=quadlet-pod-container.service
Before=quadlet-pod-container.service

[Service]
SyslogIdentifier=%N
ExecStart=/usr/bin/podman pod start --pod-id-file=%t/%N.pod-id
ExecStop=/usr/bin/podman pod stop --pod-id-file=%t/%N.pod-id --ignore --time=10
ExecStopPost=/usr/bin/podman pod rm --pod-id-file=%t/%N.pod-id --ignore --force
ExecStartPre=/usr/bin/podman pod create --infra-conmon-pidfile=%t/%N.pid --pod-id-file=%t/%N.pod-id --exit-policy=stop --replace --infra-name quadlet-pod-infra --name quadlet-pod
Environment=PODMAN_SYSTEMD_UNIT=%n
Type=forking
Restart=on-failure
PIDFile=%t/%N.pid

TASK [Check AVCs] **************************************************************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:146
Monday 07 July 2025 20:16:05 -0400 (0:00:00.963) 0:01:00.362 ***********
ok: [managed-node2] => {
    "changed": false,
    "cmd": [
        "grep",
        "type=AVC",
        "/var/log/audit/audit.log"
    ],
    "delta": "0:00:00.005292",
    "end": "2025-07-07 20:16:06.167115",
    "failed_when_result": false,
    "rc": 0,
    "start": "2025-07-07 20:16:06.161823"
}

STDOUT:

type=AVC msg=audit(1751932990.296:31): avc: denied { setpcap } for pid=655 comm="irqbalance" capability=8 scontext=system_u:system_r:irqbalance_t:s0 tcontext=system_u:system_r:irqbalance_t:s0 tclass=cap_userns permissive=0

TASK [Dump journal] ************************************************************
task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151
Monday 07 July 2025 20:16:06 -0400 (0:00:00.373) 0:01:00.735 ***********
fatal: [managed-node2]: FAILED! => {
    "changed": false,
    "cmd": [
        "journalctl",
        "-ex"
    ],
    "delta": "0:00:00.032124",
    "end": "2025-07-07 20:16:06.564883",
    "failed_when_result": true,
    "rc": 0,
    "start": "2025-07-07 20:16:06.532759"
}

STDOUT:

Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: Received SIGHUP
Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: Successfully parsed config
Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: Listen v4 ip {}
Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: Listen v6 ip {}
Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: No configuration found stopping the sever
Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state
Jul 07 20:11:58 managed-node2 kernel: veth0 (unregistering): left allmulticast mode
Jul 07 20:11:58 managed-node2 kernel: veth0 (unregistering): left promiscuous mode
Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)"
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Setting custom database backend: \"sqlite\""
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\""
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=info msg="Using sqlite as database backend"
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="systemd-logind: Unknown object '/'."
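Reference sketch for the quadlet -dryrun output further above: the two source files under /etc/containers/systemd are not reproduced verbatim anywhere in this log; Quadlet copies their [Pod]/[Container] sections into the [X-Pod]/[X-Container] sections shown in the generated units, so a rough reconstruction of the sources, offered as an assumption rather than a capture from the node, would be:

    # /etc/containers/systemd/quadlet-pod-pod.pod (reconstruction, not verbatim)
    [Pod]
    PodName=quadlet-pod

    # /etc/containers/systemd/quadlet-pod-container.container (reconstruction, not verbatim)
    [Container]
    Image=quay.io/libpod/testimage:20210610
    ContainerName=quadlet-pod-container
    Pod=quadlet-pod-pod.pod
    Exec=/bin/busybox-extras httpd -f -p 80

    [Install]
    WantedBy=default.target

The Ansible-managed comment header seen in the generated units would also be present in the source files, which is roughly consistent with the 70- and 230-byte sizes in the ls listing above.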
Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using graph driver overlay" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using run root /run/user/3001/containers" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using transient store: false" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Initializing event backend file" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid 
argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Shutting down engines" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=29568 Jul 07 20:11:58 managed-node2 systemd[27434]: Stopped libpod-conmon-82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jul 07 20:11:58 managed-node2 systemd[27434]: Removed slice user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice - cgroup user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. 
Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 podman[29546]: Pods stopped: Jul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824 Jul 07 20:11:58 managed-node2 podman[29546]: Pods removed: Jul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824 Jul 07 20:11:58 managed-node2 podman[29546]: Secrets removed: Jul 07 20:11:58 managed-node2 podman[29546]: Volumes removed: Jul 07 20:11:58 managed-node2 systemd[27434]: Created slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-b68ee141dcf27814664a590d406043e669f2802be350ecb975174d3342b509fb.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jul 07 20:11:58 managed-node2 systemd[27434]: Started rootless-netns-e8ce431d.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:11:58 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:11:58 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:11:58 managed-node2 systemd[27434]: Started run-p29607-i29907.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-7f0ec0a3585717aa5dc0675fd2d3f2fb67af120ad764f3608d3ed615db7ae81d.scope - libcrun container. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jul 07 20:11:58 managed-node2 podman[29546]: Pod: Jul 07 20:11:58 managed-node2 podman[29546]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:11:58 managed-node2 podman[29546]: Container: Jul 07 20:11:58 managed-node2 podman[29546]: d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651 Jul 07 20:11:58 managed-node2 systemd[27434]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Jul 07 20:11:58 managed-node2 sudo[29540]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:11:59 managed-node2 python3.12[29790]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:11:59 managed-node2 python3.12[29946]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:01 managed-node2 python3.12[30103]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:02 managed-node2 python3.12[30259]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:02 managed-node2 python3.12[30414]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:03 managed-node2 podman[30600]: 2025-07-07 20:12:03.454853091 -0400 EDT m=+0.369862355 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:03 managed-node2 python3.12[30769]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:04 managed-node2 python3.12[30924]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:04 managed-node2 python3.12[31079]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:12:05 managed-node2 python3.12[31204]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933524.4345405-13159-278912456486895/.source.yml _original_basename=.wig720_j follow=False checksum=b4126723a3845d354fb7beda3b3f44919cb02dd7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.511414005 -0400 EDT m=+0.019368086 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice. 
░░ Subject: A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully. ░░ ░░ The job identifier is 1968. Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.55953636 -0400 EDT m=+0.067490307 container create a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.565742112 -0400 EDT m=+0.073696026 pod create 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.592586777 -0400 EDT m=+0.100540709 container create f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.567663828 -0400 EDT m=+0.075617914 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:05 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:12:05 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6188] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6207] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 07 20:12:05 managed-node2 (udev-worker)[31379]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:12:05 managed-node2 (udev-worker)[31378]: Network interface NamePolicy= disabled on kernel command line. 
Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6283] device (veth0): carrier: link connected Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6285] device (podman1): carrier: link connected Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6369] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6382] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6387] device (podman1): Activation: starting connection 'podman1' (1839a03b-e916-4e56-ad20-699cf8b9a55a) Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6389] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6402] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6406] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6413] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1974. Jul 07 20:12:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1974. Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7043] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7046] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7051] device (podman1): Activation: successful, device activated. Jul 07 20:12:05 managed-node2 systemd[1]: Started run-p31412-i31712.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p31412-i31712.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p31412-i31712.scope has finished successfully. ░░ ░░ The job identifier is 2053. 
Jul 07 20:12:05 managed-node2 aardvark-dns[31412]: starting aardvark on a child with pid 31419 Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Successfully parsed config Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v6 ip {} Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope. ░░ Subject: A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully. ░░ ░░ The job identifier is 2059. Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : terminal_ctrl_fd: 12 Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : winsz read side: 16, winsz write side: 17 Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope - libcrun container. ░░ Subject: A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully. ░░ ░░ The job identifier is 2066. Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : container PID: 31426 Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.796580679 -0400 EDT m=+0.304534648 container init a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.799350929 -0400 EDT m=+0.307304949 container start a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope. ░░ Subject: A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully. ░░ ░░ The job identifier is 2073. 
Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach} Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : terminal_ctrl_fd: 11 Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : winsz read side: 15, winsz write side: 16 Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope - libcrun container. ░░ Subject: A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully. ░░ ░░ The job identifier is 2080. Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container PID: 31431 Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.854777496 -0400 EDT m=+0.362731459 container init f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.857558887 -0400 EDT m=+0.365512915 container start f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.863542588 -0400 EDT m=+0.371496538 pod start 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Container: f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:12:05-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-07T20:12:05-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:12:05-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:12:05-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:12:05-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:12:05-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-07T20:12:05-04:00" level=debug msg="Using tmp dir 
/run/libpod" time="2025-07-07T20:12:05-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-07T20:12:05-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:12:05-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-07T20:12:05-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-07T20:12:05-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-07T20:12:05-04:00" level=debug msg="Initializing event backend journald" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:12:05-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:12:05-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:12:05-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:12:05-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name 
libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2025-07-07T20:12:05-04:00" level=debug msg="using systemd mode: false" time="2025-07-07T20:12:05-04:00" level=debug msg="setting container name 2dbe050d31da-infra" time="2025-07-07T20:12:05-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Allocated lock 1 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\" has work directory \"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\" has run directory \"/run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="using systemd mode: false" time="2025-07-07T20:12:05-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-07T20:12:05-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-07T20:12:05-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-07T20:12:05-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /proc" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /sys" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-07T20:12:05-04:00" level=debug msg="Allocated lock 2 for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" has work directory \"/var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" has run directory \"/run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Strongconnecting node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Pushed a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 onto stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Finishing node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307. Popped a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 off stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Strongconnecting node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="Pushed f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b onto stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Finishing node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b. Popped f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b off stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Made network namespace at /run/netns/netns-1ce7c5df-883b-4dd4-e4c5-5e8b3ad8b1f6 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Created root filesystem for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_51bfc940_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ce:aa:8c:aa:62:92", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-07-07T20:12:05-04:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-07-07T20:12:05-04:00" level=debug msg="Setting Cgroups for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-07T20:12:05-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created OCI spec for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/config.json" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as 
machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-07T20:12:05-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -u a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata -p /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/pidfile -n 2dbe050d31da-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307]" time="2025-07-07T20:12:05-04:00" level=info msg="Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope" time="2025-07-07T20:12:05-04:00" level=debug msg="Received: 31426" time="2025-07-07T20:12:05-04:00" level=info msg="Got Conmon PID as 31424" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 in OCI runtime" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-07T20:12:05-04:00" level=debug msg="Starting container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 with command [/catatonit -P]" time="2025-07-07T20:12:05-04:00" level=debug msg="Started container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/66KUKQ3YMIWXUMPTPGDU24SJUU,upperdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/diff,workdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c198,c290\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Mounted container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" at \"/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created root filesystem for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged" time="2025-07-07T20:12:05-04:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-07-07T20:12:05-04:00" level=debug msg="Setting Cgroups for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-07T20:12:05-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-07T20:12:05-04:00" level=debug msg="Created OCI spec for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/config.json" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-07T20:12:05-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -u f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata -p /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage 
--exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b]" time="2025-07-07T20:12:05-04:00" level=info msg="Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope" time="2025-07-07T20:12:05-04:00" level=debug msg="Received: 31431" time="2025-07-07T20:12:05-04:00" level=info msg="Got Conmon PID as 31429" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b in OCI runtime" time="2025-07-07T20:12:05-04:00" level=debug msg="Starting container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-07T20:12:05-04:00" level=debug msg="Started container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:12:05-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:12:06 managed-node2 python3.12[31587]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:12:06 managed-node2 systemd[1]: Reload requested from client PID 31588 ('systemctl') (unit session-5.scope)... Jul 07 20:12:06 managed-node2 systemd[1]: Reloading... Jul 07 20:12:06 managed-node2 systemd-rc-local-generator[31626]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:06 managed-node2 systemd[1]: Reloading finished in 201 ms. Jul 07 20:12:07 managed-node2 python3.12[31800]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jul 07 20:12:07 managed-node2 systemd[1]: Reload requested from client PID 31803 ('systemctl') (unit session-5.scope)... Jul 07 20:12:07 managed-node2 systemd[1]: Reloading... Jul 07 20:12:07 managed-node2 systemd-rc-local-generator[31851]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:07 managed-node2 systemd[1]: Reloading finished in 212 ms. 
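The template instance enabled just above and started below, podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, takes its name from the kube file path via the systemd-escape call logged at 20:12:01. A minimal shell sketch of the equivalent manual steps, assuming the ansible-systemd invocations map to plain systemctl enable/start:

    # Derive the template instance name from the kube file path
    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    # prints: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
    # Enable and start the instance (single quotes keep the \x2d escapes intact)
    systemctl enable 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'
    systemctl start 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service'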
Jul 07 20:12:08 managed-node2 python3.12[32015]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:12:08 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2088. Jul 07 20:12:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2087. Jul 07 20:12:08 managed-node2 podman[32019]: 2025-07-07 20:12:08.118637911 -0400 EDT m=+0.022713444 pod stop 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 podman[32019]: time="2025-07-07T20:12:18-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jul 07 20:12:18 managed-node2 systemd[1]: libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state. 
Jul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container 31431 exited with status 137 Jul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope/container/memory.events Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.143617522 -0400 EDT m=+10.047693144 container died f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Using sqlite as database backend" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph driver overlay" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using run root /run/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using transient store: false" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 07 20:12:18 
managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Initializing event backend journald" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount has successfully entered the 'dead' state. 
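The "exited with status 137" that conmon reports a few entries above is the usual 128 + signal-number convention: the httpd2-httpd2 container ignored SIGTERM for the 10-second grace period, podman escalated to SIGKILL (signal 9), and 128 + 9 = 137. Decoding such a status from a shell:

status=137
echo $((status - 128))      # 9
kill -l $((status - 128))   # KILL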
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.191389905 -0400 EDT m=+10.095465414 container cleanup f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=32031 Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state. 
Jul 07 20:12:18 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope/container/memory.events Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.211877781 -0400 EDT m=+10.115953439 container died a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)" Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Using sqlite as database backend" Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Received SIGHUP Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Successfully parsed config Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v4 ip {} Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v6 ip {} Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: No configuration found stopping the sever Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph driver overlay" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using run root /run/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using transient store: false" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: 
time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Initializing event backend journald" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:12:18 managed-node2 systemd[1]: run-p31412-i31712.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p31412-i31712.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.2554] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:12:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2172. Jul 07 20:12:18 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2172. Jul 07 20:12:18 managed-node2 systemd[1]: run-netns-netns\x2d1ce7c5df\x2d883b\x2d4dd4\x2de4c5\x2d5e8b3ad8b1f6.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d1ce7c5df\x2d883b\x2d4dd4\x2de4c5\x2d5e8b3ad8b1f6.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.329738431 -0400 EDT m=+10.233814039 container cleanup a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=32043 Jul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: Stopped libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope. ░░ Subject: A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished. ░░ ░░ The job identifier is 2252 and the job result is done. Jul 07 20:12:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice. ░░ Subject: A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished. ░░ ░░ The job identifier is 2251 and the job result is done. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.395138747 -0400 EDT m=+10.299214275 container remove f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.421655838 -0400 EDT m=+10.325731365 container remove a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:18 managed-node2 systemd[1]: machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: No such file or directory Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430187452 -0400 EDT m=+10.334262942 pod remove 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: Pods stopped: Jul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Jul 07 20:12:18 managed-node2 podman[32019]: Pods removed: Jul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Jul 07 20:12:18 managed-node2 podman[32019]: Secrets removed: Jul 07 20:12:18 managed-node2 podman[32019]: Volumes removed: Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430564473 -0400 EDT m=+10.334640043 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.451338654 -0400 EDT m=+10.355414196 container create 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice. ░░ Subject: A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully. ░░ ░░ The job identifier is 2253. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.492350688 -0400 EDT m=+10.396426207 container create 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.498774654 -0400 EDT m=+10.402850152 pod create 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.52421506 -0400 EDT m=+10.428290660 container create 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.524550628 -0400 EDT m=+10.428626163 container restart 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope - libcrun container. ░░ Subject: A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully. ░░ ░░ The job identifier is 2259. 
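Each container in the recreated pod, including the new 671aefaa976c-service "service container", carries the label PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, which ties the workload back to the systemd unit that owns it (podman auto-update, among other things, keys off this label). A hypothetical spot check, assuming the container name from this log:

podman container inspect httpd2-httpd2 \
    --format '{{ index .Config.Labels "PODMAN_SYSTEMD_UNIT" }}'
# expected: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service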
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.500507692 -0400 EDT m=+10.404583357 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.584365043 -0400 EDT m=+10.488440589 container init 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.587275237 -0400 EDT m=+10.491350731 container start 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:12:18 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:12:18 managed-node2 (udev-worker)[32052]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:18 managed-node2 (udev-worker)[32051]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6102] device (podman1): carrier: link connected Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6105] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6153] device (veth0): carrier: link connected Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6156] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6318] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6330] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6372] device (podman1): Activation: starting connection 'podman1' (d4ed169d-27e3-42b9-8610-eac77be55153) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6374] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6376] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6377] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6380] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6494] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6511] device (podman1): state change: secondaries -> activated (reason 
'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6517] device (podman1): Activation: successful, device activated. Jul 07 20:12:18 managed-node2 systemd[1]: Started run-p32110-i32410.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p32110-i32410.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p32110-i32410.scope has finished successfully. ░░ ░░ The job identifier is 2265. Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope - libcrun container. ░░ Subject: A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully. ░░ ░░ The job identifier is 2271. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.743416519 -0400 EDT m=+10.647492104 container init 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.745567921 -0400 EDT m=+10.649643561 container start 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope - libcrun container. ░░ Subject: A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully. ░░ ░░ The job identifier is 2278. 
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.78342779 -0400 EDT m=+10.687503327 container init 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.786203333 -0400 EDT m=+10.690278905 container start 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.792190652 -0400 EDT m=+10.696266270 pod start 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: Pod: Jul 07 20:12:18 managed-node2 podman[32019]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:12:18 managed-node2 podman[32019]: Container: Jul 07 20:12:18 managed-node2 podman[32019]: 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd Jul 07 20:12:18 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2087. Jul 07 20:12:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount has successfully entered the 'dead' state. 
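Taken together, the sequence since the unit start (stop and removal of the old httpd2 pod, the "Pods stopped:/Pods removed:" summary, a network create, a fresh pod plus the -service container, and finally "Started podman-kube@…httpd2.yml.service") is podman kube play re-running against the same YAML under systemd and replacing the pod that the earlier ad-hoc podman_play task had created. The unit file itself is not shown in this log; as a minimal sketch, its start roughly amounts to the following (flags assumed, not read from this run):

podman kube play --replace --service-container \
    /etc/containers/ansible-kubernetes.d/httpd2.yml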
Jul 07 20:12:19 managed-node2 python3.12[32279]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:20 managed-node2 python3.12[32436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:21 managed-node2 python3.12[32592]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:22 managed-node2 python3.12[32747]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:23 managed-node2 podman[32933]: 2025-07-07 20:12:23.116451069 -0400 EDT m=+0.400655980 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:23 managed-node2 python3.12[33102]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:23 managed-node2 python3.12[33257]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:24 managed-node2 python3.12[33412]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:12:24 managed-node2 python3.12[33537]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933544.127541-13777-126359990981212/.source.yml _original_basename=.mz8q_k1v follow=False checksum=c8e8f54a2e9107a24008cfb6f1d2d59b89d86a42 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:25 managed-node2 python3.12[33692]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.227950329 -0400 EDT m=+0.015681172 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice. ░░ Subject: A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully. ░░ ░░ The job identifier is 2285. Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.26795522 -0400 EDT m=+0.055686154 container create 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.274184949 -0400 EDT m=+0.061915786 pod create 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.300940456 -0400 EDT m=+0.088671322 container create bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:25 managed-node2 kernel: veth1: entered allmulticast mode Jul 07 20:12:25 managed-node2 kernel: veth1: entered promiscuous mode Jul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3256] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.275812121 -0400 EDT m=+0.063543116 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:25 managed-node2 (udev-worker)[33710]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3293] device (veth1): carrier: link connected Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope. ░░ Subject: A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully. ░░ ░░ The job identifier is 2291. 
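The systemd-escape call logged a few entries above is what produces the opaque instance names used throughout this log: the path of the kube YAML is escaped ("/" becomes "-", "-" becomes "\x2d") and substituted into the podman-kube@.service template. Run by hand:

systemd-escape --template podman-kube@.service \
    /etc/containers/ansible-kubernetes.d/httpd3.yml
# -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service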
Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope - libcrun container. ░░ Subject: A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully. ░░ ░░ The job identifier is 2298. Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.421199278 -0400 EDT m=+0.208930271 container init 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.423916963 -0400 EDT m=+0.211647873 container start 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope. ░░ Subject: A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully. ░░ ░░ The job identifier is 2305. Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope - libcrun container. ░░ Subject: A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully. ░░ ░░ The job identifier is 2312. 
Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.47967795 -0400 EDT m=+0.267408864 container init bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.482134977 -0400 EDT m=+0.269865956 container start bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.48810857 -0400 EDT m=+0.275839437 pod start 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:26 managed-node2 python3.12[33903]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 33904 ('systemctl') (unit session-5.scope)... Jul 07 20:12:26 managed-node2 systemd[1]: Reloading... Jul 07 20:12:26 managed-node2 systemd-rc-local-generator[33949]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:26 managed-node2 systemd[1]: Reloading finished in 216 ms. Jul 07 20:12:26 managed-node2 python3.12[34116]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 34119 ('systemctl') (unit session-5.scope)... Jul 07 20:12:26 managed-node2 systemd[1]: Reloading... Jul 07 20:12:27 managed-node2 systemd-rc-local-generator[34169]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:27 managed-node2 systemd[1]: Reloading finished in 222 ms. Jul 07 20:12:27 managed-node2 python3.12[34331]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:12:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2319. Jul 07 20:12:27 managed-node2 podman[34335]: 2025-07-07 20:12:27.799634553 -0400 EDT m=+0.024842125 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:28 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: time="2025-07-07T20:12:37-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jul 07 20:12:37 managed-node2 systemd[1]: libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.830033292 -0400 EDT m=+10.055241268 container died bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.871361891 -0400 EDT m=+10.096569436 container cleanup bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.891562873 -0400 EDT m=+10.116770720 container died 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra) Jul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:37 managed-node2 systemd[1]: run-netns-netns\x2d9f683135\x2dcb1d\x2d6825\x2d135b\x2df344c71f6412.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d9f683135\x2dcb1d\x2d6825\x2d135b\x2df344c71f6412.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.960143035 -0400 EDT m=+10.185350606 container cleanup 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:37 managed-node2 systemd[1]: Stopping libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope... ░░ Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution. ░░ ░░ The job identifier is 2405. Jul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: Stopped libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope. ░░ Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished. ░░ ░░ The job identifier is 2405 and the job result is done. Jul 07 20:12:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice. ░░ Subject: A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished. 
░░ ░░ The job identifier is 2404 and the job result is done. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.968733882 -0400 EDT m=+10.193941424 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.973070119 -0400 EDT m=+10.198277748 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.999456374 -0400 EDT m=+10.224663949 container remove bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.025619131 -0400 EDT m=+10.250826702 container remove 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:38 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034330595 -0400 EDT m=+10.259538132 pod remove 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: Pods stopped: Jul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 Jul 07 20:12:38 managed-node2 podman[34335]: Pods removed: Jul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 Jul 07 20:12:38 managed-node2 podman[34335]: Secrets removed: Jul 07 20:12:38 managed-node2 podman[34335]: Volumes removed: Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034440895 -0400 EDT m=+10.259648519 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.055160997 -0400 EDT m=+10.280368656 container create f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice. 
░░ Subject: A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully. ░░ ░░ The job identifier is 2406. Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.089857565 -0400 EDT m=+10.315065108 container create afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.096374067 -0400 EDT m=+10.321581608 pod create f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.121708454 -0400 EDT m=+10.346916101 container create 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.122052483 -0400 EDT m=+10.347260058 container restart f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope - libcrun container. ░░ Subject: A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully. ░░ ░░ The job identifier is 2412. 
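The entries above show podman-kube recreating the httpd3 pod under a new pod ID (f714ebed6201...) after the old one was torn down. A minimal sketch for cross-checking the recreated pod on the host, assuming only the pod name httpd3 taken from the log; the --format selectors are standard podman Go templates:

    # confirm the pod is running and list its containers (infra + httpd3-httpd3)
    podman pod inspect httpd3 --format '{{.State}}'
    podman ps --filter pod=httpd3 --format '{{.ID}} {{.Names}} {{.Status}}'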
Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.09796996 -0400 EDT m=+10.323177689 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.161037652 -0400 EDT m=+10.386245273 container init f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.163287704 -0400 EDT m=+10.388495346 container start f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:38 managed-node2 kernel: veth1: entered allmulticast mode Jul 07 20:12:38 managed-node2 kernel: veth1: entered promiscuous mode Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jul 07 20:12:38 managed-node2 (udev-worker)[34367]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1845] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1884] device (veth1): carrier: link connected Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope - libcrun container. ░░ Subject: A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully. ░░ ░░ The job identifier is 2418. Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.256569895 -0400 EDT m=+10.481777617 container init afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.259020833 -0400 EDT m=+10.484228554 container start afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope - libcrun container. ░░ Subject: A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully. ░░ ░░ The job identifier is 2425. 
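The kernel and NetworkManager messages above correspond to the infra container's veth pair being attached to the podman1 bridge of podman-default-kube-network. A minimal sketch for inspecting that wiring, assuming the interface and network names shown in the log:

    # veth1 should appear as a port of the podman1 bridge while the pod is up
    ip link show master podman1
    podman network inspect podman-default-kube-network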
Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.305507767 -0400 EDT m=+10.530715363 container init 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.307690208 -0400 EDT m=+10.532897848 container start 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.313601662 -0400 EDT m=+10.538809232 pod start f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: Pod: Jul 07 20:12:38 managed-node2 podman[34335]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:12:38 managed-node2 podman[34335]: Container: Jul 07 20:12:38 managed-node2 podman[34335]: 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e Jul 07 20:12:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2319. Jul 07 20:12:39 managed-node2 sudo[34620]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxlwmudvcyywtlojiblkxiobcxywcxdd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933558.7922473-14198-135957250003670/AnsiballZ_command.py' Jul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:12:39 managed-node2 python3.12[34623]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:39 managed-node2 systemd[27434]: Started podman-34631.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
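The template instance name started above encodes the escaped path of the kube YAML; the same systemd-escape call the role issues later in this log can derive it by hand, for example:

    # derive the unit name for a given kube file, then query it
    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
    systemctl status 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'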
Jul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:12:39 managed-node2 python3.12[34794]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 python3.12[34957]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 sudo[35170]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-edeafswnzssmkzeqkidxujvilmmhsinz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933560.1733143-14258-164223741543922/AnsiballZ_command.py' Jul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:12:40 managed-node2 python3.12[35173]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:12:40 managed-node2 python3.12[35331]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:41 managed-node2 python3.12[35489]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:42 managed-node2 python3.12[35647]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:42 managed-node2 python3.12[35804]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] 
timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:43 managed-node2 python3.12[35960]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:43 managed-node2 python3.12[36116]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:43 managed-node2 python3.12[36272]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:46 managed-node2 python3.12[36583]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:47 managed-node2 python3.12[36744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:50 managed-node2 python3.12[36901]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:51 managed-node2 python3.12[37057]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jul 07 20:12:51 managed-node2 python3.12[37214]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jul 07 20:12:52 managed-node2 python3.12[37371]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 07 20:12:54 managed-node2 python3.12[37526]: ansible-ansible.legacy.dnf Invoked with 
name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:55 managed-node2 python3.12[37683]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:55 managed-node2 python3.12[37839]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:56 managed-node2 python3.12[37996]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 07 20:12:58 managed-node2 python3.12[38193]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 07 20:12:59 managed-node2 python3.12[38348]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 07 20:13:03 managed-node2 python3.12[38503]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:13:03 managed-node2 python3.12[38659]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:04 managed-node2 python3.12[38817]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:04 managed-node2 python3.12[38973]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:05 managed-node2 python3.12[39129]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False 
expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:06 managed-node2 python3.12[39285]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:13:07 managed-node2 python3.12[39440]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:07 managed-node2 python3.12[39595]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:07 managed-node2 sudo[39800]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sggfuwotsbnqelqoxqdpnxcztrgpfxov ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933587.546744-15228-221616196309386/AnsiballZ_podman_image.py' Jul 07 20:13:07 managed-node2 sudo[39800]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:07 managed-node2 systemd[27434]: Started podman-39804.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 116. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39811.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 120. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39818.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 124. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39825.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 128. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39832.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 132. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39839.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 136. Jul 07 20:13:08 managed-node2 sudo[39800]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:08 managed-node2 python3.12[40000]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:09 managed-node2 python3.12[40157]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:09 managed-node2 python3.12[40312]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:10 managed-node2 python3.12[40390]: ansible-ansible.legacy.file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=.g46gggh2 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:10 managed-node2 sudo[40595]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwtednmiooezcolvhoaprqmyaqljtcnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933590.280253-15331-51302432010459/AnsiballZ_podman_play.py' Jul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:10 managed-node2 systemd[27434]: Started podman-40605.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 140. Jul 07 20:13:10 managed-node2 systemd[27434]: Created slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 144. Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:13:10-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-07T20:13:10-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-07T20:13:10-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:13:10-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:13:10-04:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-07-07T20:13:10-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:13:10-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-07T20:13:10-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-07T20:13:10-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-07T20:13:10-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-07T20:13:10-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-07T20:13:10-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:13:10-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-07T20:13:10-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-07T20:13:10-04:00" level=debug msg="Initializing event backend file" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" 
time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:13:10-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:13:10-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 8ffa2dc6ff76aec6ab19c0699b0508615d79e3b6d14dd7cd78be0f62c7718f3e bridge podman1 2025-07-07 20:11:45.408967969 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:13:10-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:13:10-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:13:10-04:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice for parent user.slice and name libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d" time="2025-07-07T20:13:10-04:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice" time="2025-07-07T20:13:10-04:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice" Error: adding pod to state: name "httpd1" is in use: pod already exists time="2025-07-07T20:13:10-04:00" level=debug msg="Shutting down engines" time="2025-07-07T20:13:10-04:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=40605 Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:11 managed-node2 python3.12[40767]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:13:12 managed-node2 python3.12[40923]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:13 managed-node2 python3.12[41080]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:14 managed-node2 python3.12[41236]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:14 managed-node2 python3.12[41391]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:15 managed-node2 podman[41576]: 2025-07-07 20:13:15.556653463 -0400 EDT m=+0.319976011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:13:15 managed-node2 python3.12[41746]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:16 managed-node2 python3.12[41903]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:16 managed-node2 python3.12[42058]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:17 managed-node2 python3.12[42136]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=.cnp2b107 recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play 
Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:17 managed-node2 podman[42298]: 2025-07-07 20:13:17.610100559 -0400 EDT m=+0.016705745 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:13:17 managed-node2 systemd[1]: Created slice machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice - cgroup machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice. ░░ Subject: A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully. ░░ ░░ The job identifier is 2432. Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:13:17-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-07T20:13:17-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:13:17-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:13:17-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:13:17-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:13:17-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-07T20:13:17-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-07T20:13:17-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-07T20:13:17-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-07T20:13:17-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-07T20:13:17-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:13:17-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-07T20:13:17-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has 
CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-07T20:13:17-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-07T20:13:17-04:00" level=debug msg="Initializing event backend journald" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:13:17-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:13:17-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:13:17-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:13:17-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:13:17-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice for parent machine.slice and name libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9" time="2025-07-07T20:13:17-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice" time="2025-07-07T20:13:17-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice" Error: adding pod to state: name "httpd2" is in use: pod already exists time="2025-07-07T20:13:17-04:00" level=debug msg="Shutting down engines" time="2025-07-07T20:13:17-04:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=42298 Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 07 20:13:18 managed-node2 python3.12[42459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:20 managed-node2 python3.12[42616]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:20 managed-node2 python3.12[42772]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:21 managed-node2 python3.12[42927]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:22 managed-node2 podman[43112]: 2025-07-07 20:13:22.329599025 -0400 EDT m=+0.443555601 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:13:22 managed-node2 python3.12[43281]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:23 managed-node2 python3.12[43438]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:23 managed-node2 python3.12[43593]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:24 managed-node2 python3.12[43671]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=.garaxq8q recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:24 managed-node2 python3.12[43826]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None 
password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:24 managed-node2 podman[43834]: 2025-07-07 20:13:24.584154544 -0400 EDT m=+0.014632770 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:13:24 managed-node2 systemd[1]: Created slice machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice - cgroup machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice. ░░ Subject: A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully. ░░ ░░ The job identifier is 2438. Jul 07 20:13:25 managed-node2 sudo[44045]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fkixybyzrrzuqcjcfmgozxwfwmajookd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933605.3310475-16168-26421251595513/AnsiballZ_command.py' Jul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:25 managed-node2 python3.12[44048]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:25 managed-node2 systemd[27434]: Started podman-44056.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 148. 
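The sudo entries in this section run rootless podman as podman_basic_user; where XDG_RUNTIME_DIR=/run/user/3001 is exported, the command talks to that user's runtime. A rough manual equivalent of the inspect shown above, with the user, runtime directory, and pod name taken from the log:

    # inspect the rootless httpd1 pod as podman_basic_user
    sudo -u podman_basic_user /bin/sh -c 'XDG_RUNTIME_DIR=/run/user/3001 podman pod inspect httpd1 --format "{{.State}}"'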
Jul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:26 managed-node2 python3.12[44220]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:26 managed-node2 python3.12[44383]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:26 managed-node2 sudo[44596]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ydnvtlhbpgwizyiplpnqjwanppjzhlbg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933606.7243044-16231-16521388663911/AnsiballZ_command.py' Jul 07 20:13:26 managed-node2 sudo[44596]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:27 managed-node2 python3.12[44599]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:27 managed-node2 sudo[44596]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:27 managed-node2 python3.12[44757]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:27 managed-node2 python3.12[44915]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:28 managed-node2 python3.12[45073]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:28 managed-node2 python3.12[45229]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] 
timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:29 managed-node2 python3.12[45385]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:31 managed-node2 python3.12[45696]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:32 managed-node2 python3.12[45857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:36 managed-node2 python3.12[46014]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:13:36 managed-node2 python3.12[46170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:37 managed-node2 python3.12[46327]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:37 managed-node2 python3.12[46483]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:38 managed-node2 python3.12[46639]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:39 managed-node2 python3.12[46795]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:39 managed-node2 sudo[47002]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cdbeefpvsgoajxqmxzaiihkscmhhxllh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933619.5337389-16868-45510891100291/AnsiballZ_systemd.py' Jul 07 20:13:39 managed-node2 sudo[47002]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 
20:13:40 managed-node2 python3.12[47005]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:13:40 managed-node2 systemd[27434]: Reload requested from client PID 47008 ('systemctl')... Jul 07 20:13:40 managed-node2 systemd[27434]: Reloading... Jul 07 20:13:40 managed-node2 systemd[27434]: Reloading finished in 62 ms. Jul 07 20:13:40 managed-node2 systemd[27434]: Stopping podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 152. Jul 07 20:13:50 managed-node2 podman[47019]: time="2025-07-07T20:13:50-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:13:50 managed-node2 systemd[27434]: Removed slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 153 and the job result is done. Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 podman[47019]: Pods stopped: Jul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:13:50 managed-node2 podman[47019]: Pods removed: Jul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:13:50 managed-node2 podman[47019]: Secrets removed: Jul 07 20:13:50 managed-node2 podman[47019]: Volumes removed: Jul 07 20:13:50 managed-node2 systemd[27434]: Stopped podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 152 and the job result is done. Jul 07 20:13:50 managed-node2 systemd[27434]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Consumed 589ms CPU time, 74.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:13:50 managed-node2 sudo[47002]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:50 managed-node2 python3.12[47222]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:51 managed-node2 sudo[47429]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ragjadshxwcsmhyabehcrpjvrodecwop ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933631.1425395-17190-192702739809308/AnsiballZ_podman_play.py' Jul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play version: 5.5.1, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:51 managed-node2 systemd[27434]: Started podman-47439.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. 
Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:52 managed-node2 python3.12[47600]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:53 managed-node2 python3.12[47755]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:13:53 managed-node2 python3.12[47911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:54 managed-node2 python3.12[48068]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:55 managed-node2 python3.12[48224]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:13:55 managed-node2 systemd[1]: Reload requested from client PID 48227 ('systemctl') (unit session-5.scope)... Jul 07 20:13:55 managed-node2 systemd[1]: Reloading... Jul 07 20:13:55 managed-node2 systemd-rc-local-generator[48270]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:13:55 managed-node2 systemd[1]: Reloading finished in 218 ms. Jul 07 20:13:55 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2445. 
Jul 07 20:13:56 managed-node2 podman[48286]: 2025-07-07 20:13:56.040685373 -0400 EDT m=+0.023115658 pod stop 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:14:06 managed-node2 podman[48286]: time="2025-07-07T20:14:06-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jul 07 20:14:06 managed-node2 systemd[1]: libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.074195184 -0400 EDT m=+10.056625730 container died 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.11235352 -0400 EDT m=+10.094783772 container cleanup 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:14:06 managed-node2 systemd[1]: libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.131606724 -0400 EDT m=+10.114044140 container died 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:14:06 managed-node2 systemd[1]: run-netns-netns\x2d82249bc3\x2db382\x2d7b9a\x2d81bc\x2d86e8308d188a.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d82249bc3\x2db382\x2d7b9a\x2d81bc\x2d86e8308d188a.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.202757575 -0400 EDT m=+10.185187931 container cleanup 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: Removed slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice. ░░ Subject: A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished. ░░ ░░ The job identifier is 2446 and the job result is done. 
Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.23022593 -0400 EDT m=+10.212656217 container remove 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.25693135 -0400 EDT m=+10.239361641 container remove 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: Failed to open /run/systemd/transient/machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: No such file or directory Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.265332892 -0400 EDT m=+10.247763153 pod remove 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.268022285 -0400 EDT m=+10.250452816 container kill 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.274842916 -0400 EDT m=+10.257273277 container died 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount has successfully entered the 'dead' state. 
Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.336039419 -0400 EDT m=+10.318469707 container remove 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 podman[48286]: Pods stopped: Jul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:14:06 managed-node2 podman[48286]: Pods removed: Jul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:14:06 managed-node2 podman[48286]: Secrets removed: Jul 07 20:14:06 managed-node2 podman[48286]: Volumes removed: Jul 07 20:14:06 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished. ░░ ░░ The job identifier is 2445 and the job result is done. Jul 07 20:14:06 managed-node2 python3.12[48488]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount has successfully entered the 'dead' state. 
Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:14:07 managed-node2 python3.12[48813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:08 managed-node2 python3.12[48969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:10 managed-node2 python3.12[49126]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:10 managed-node2 python3.12[49282]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:14:10 managed-node2 systemd[1]: Reload requested from client PID 49285 ('systemctl') (unit session-5.scope)... Jul 07 20:14:10 managed-node2 systemd[1]: Reloading... Jul 07 20:14:11 managed-node2 systemd-rc-local-generator[49327]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:14:11 managed-node2 systemd[1]: Reloading finished in 211 ms. Jul 07 20:14:11 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2447. 
Jul 07 20:14:11 managed-node2 podman[49344]: 2025-07-07 20:14:11.206854573 -0400 EDT m=+0.022831781 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 podman[49344]: time="2025-07-07T20:14:21-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jul 07 20:14:21 managed-node2 systemd[1]: libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.240228718 -0400 EDT m=+10.056206048 container died 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test) Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.280265572 -0400 EDT m=+10.096242780 container cleanup 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:14:21 managed-node2 systemd[1]: libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.298799022 -0400 EDT m=+10.114776654 container died afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: run-p32110-i32410.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p32110-i32410.scope has successfully entered the 'dead' state. 
Jul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:14:21 managed-node2 NetworkManager[714]: [1751933661.3375] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2448. Jul 07 20:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2448. Jul 07 20:14:21 managed-node2 systemd[1]: run-netns-netns\x2dd8d23001\x2dccdd\x2d98d5\x2d0185\x2d01ce80e8c916.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd8d23001\x2dccdd\x2d98d5\x2d0185\x2d01ce80e8c916.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.414963852 -0400 EDT m=+10.230941061 container cleanup afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: Removed slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice. ░░ Subject: A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished. ░░ ░░ The job identifier is 2527 and the job result is done. 
Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.422214439 -0400 EDT m=+10.238191678 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.428641853 -0400 EDT m=+10.244619177 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.436145908 -0400 EDT m=+10.252123291 container kill f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.451967497 -0400 EDT m=+10.267945068 container died f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.478053211 -0400 EDT m=+10.294030464 container remove 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.504395032 -0400 EDT m=+10.320372288 container remove afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.513441882 -0400 EDT m=+10.329419091 pod remove f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 podman[49397]: 2025-07-07 20:14:21.533507126 -0400 EDT m=+0.087614608 container cleanup 
f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.560230359 -0400 EDT m=+10.376207602 container remove f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: Pods stopped: Jul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:14:21 managed-node2 podman[49344]: Pods removed: Jul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:14:21 managed-node2 podman[49344]: Secrets removed: Jul 07 20:14:21 managed-node2 podman[49344]: Volumes removed: Jul 07 20:14:21 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished. ░░ ░░ The job identifier is 2447 and the job result is done. Jul 07 20:14:21 managed-node2 python3.12[49562]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount has successfully entered the 'dead' state. 
Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 python3.12[49888]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:23 managed-node2 python3.12[50043]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 07 20:14:24 managed-node2 python3.12[50199]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:24 managed-node2 sudo[50407]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpalnwprgspnzsnsvsnylrerubrespev ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.3660543-18219-69893342173436/AnsiballZ_podman_container_info.py' Jul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:24 managed-node2 python3.12[50410]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 07 20:14:24 managed-node2 systemd[27434]: Started podman-50411.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 158. 
Jul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:25 managed-node2 sudo[50623]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lygojmlyopjnwoxfpskxczdponstloyj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.983715-18245-114379203266243/AnsiballZ_command.py' Jul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:25 managed-node2 python3.12[50626]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50627.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 162. Jul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:25 managed-node2 sudo[50839]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iwlmlwzhyduyhuxfusqoosixqzgzqvsl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933665.559254-18262-213663586865707/AnsiballZ_command.py' Jul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:25 managed-node2 python3.12[50842]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50843.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 166. Jul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:26 managed-node2 python3.12[51004]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 07 20:14:26 managed-node2 systemd[1]: Stopping user@3001.service - User Manager for UID 3001... ░░ Subject: A stop job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 2529. Jul 07 20:14:26 managed-node2 systemd[27434]: Activating special unit exit.target... Jul 07 20:14:26 managed-node2 systemd[27434]: Stopping podman-pause-8c2d7b35.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 177. 
Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 181 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: app-podman\x2dkube.slice: Consumed 589ms CPU time, 74.4M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 178 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 191 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 190 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 184 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 187 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 189 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 186 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 185 and the job result is done. Jul 07 20:14:26 managed-node2 dbus-broker[27991]: Dispatched 2275 messages @ 3(±13)μs / message. 
░░ Subject: Dispatched 2275 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 2275) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 174. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 179 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped podman-pause-8c2d7b35.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 177 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 176 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 175 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 180 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 182 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: app.slice: Consumed 615ms CPU time, 74.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[27434]: Reached target shutdown.target - Shutdown. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 173. Jul 07 20:14:26 managed-node2 systemd[27434]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 171. Jul 07 20:14:26 managed-node2 systemd[27434]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 170. Jul 07 20:14:26 managed-node2 systemd-logind[659]: Removed session 6. ░░ Subject: Session 6 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 6 has been terminated. Jul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@3001.service has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: Stopped user@3001.service - User Manager for UID 3001. ░░ Subject: A stop job for unit user@3001.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@3001.service has finished. ░░ ░░ The job identifier is 2529 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Consumed 2.005s CPU time, 92.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@3001.service completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[1]: Stopping user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A stop job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 2528. Jul 07 20:14:26 managed-node2 systemd[1]: run-user-3001.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: user-runtime-dir@3001.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: Stopped user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A stop job for unit user-runtime-dir@3001.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@3001.service has finished. ░░ ░░ The job identifier is 2528 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: Removed slice user-3001.slice - User Slice of UID 3001. 
░░ Subject: A stop job for unit user-3001.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-3001.slice has finished. ░░ ░░ The job identifier is 2530 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: user-3001.slice: Consumed 2.031s CPU time, 92.8M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-3001.slice completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 python3.12[51165]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:29 managed-node2 python3.12[51321]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:14:31 managed-node2 python3.12[51478]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:33 managed-node2 python3.12[51634]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:36 managed-node2 python3.12[51790]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:38 managed-node2 python3.12[51946]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:39 managed-node2 sudo[52152]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfrixhsqpnpoemptjbnyouggwynzakes ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933679.2995527-18658-251091507513885/AnsiballZ_command.py' Jul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:39 managed-node2 python3.12[52155]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session 
closed for user podman_basic_user Jul 07 20:14:40 managed-node2 python3.12[52317]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:40 managed-node2 python3.12[52479]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd3 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:40 managed-node2 sudo[52692]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tblibtrtarfdedjsuypqezvumnboznzr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933680.662618-18720-8327876079082/AnsiballZ_command.py' Jul 07 20:14:40 managed-node2 sudo[52692]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:40 managed-node2 python3.12[52695]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:41 managed-node2 sudo[52692]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:41 managed-node2 python3.12[52853]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:41 managed-node2 python3.12[53011]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:42 managed-node2 python3.12[53169]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:44 managed-node2 python3.12[53479]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:45 managed-node2 python3.12[53640]: ansible-getent 
Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:45 managed-node2 python3.12[53796]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:48 managed-node2 python3.12[53953]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:14:48 managed-node2 python3.12[54109]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:49 managed-node2 python3.12[54266]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:49 managed-node2 python3.12[54422]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:50 managed-node2 python3.12[54578]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:51 managed-node2 python3.12[54734]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:51 managed-node2 python3.12[54889]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:52 managed-node2 python3.12[55044]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:53 managed-node2 python3.12[55199]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:53 managed-node2 python3.12[55355]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:55 managed-node2 python3.12[55512]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:56 managed-node2 python3.12[55668]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:14:56 managed-node2 python3.12[55825]: ansible-stat Invoked with 
path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:56 managed-node2 python3.12[55980]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:58 managed-node2 python3.12[56135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:59 managed-node2 python3.12[56292]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:00 managed-node2 python3.12[56448]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:15:00 managed-node2 python3.12[56605]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:01 managed-node2 python3.12[56760]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:02 managed-node2 python3.12[56915]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 07 20:15:02 managed-node2 python3.12[57071]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:03 managed-node2 python3.12[57226]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:04 managed-node2 python3.12[57381]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:06 managed-node2 python3.12[57587]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:15:07 managed-node2 python3.12[57771]: ansible-stat Invoked with 
path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:07 managed-node2 python3.12[57926]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:09 managed-node2 python3.12[58236]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:10 managed-node2 python3.12[58398]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:15:10 managed-node2 python3.12[58554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:12 managed-node2 python3.12[58711]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:13 managed-node2 python3.12[58868]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:14 managed-node2 python3.12[59023]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:14 managed-node2 python3.12[59148]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.0353012-19840-138139497644559/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:15 managed-node2 python3.12[59303]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:15 managed-node2 systemd[1]: Reload requested from client PID 59304 ('systemctl') (unit session-5.scope)... Jul 07 20:15:15 managed-node2 systemd[1]: Reloading... Jul 07 20:15:15 managed-node2 systemd-rc-local-generator[59347]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:15:15 managed-node2 systemd[1]: Reloading finished in 191 ms. Jul 07 20:15:16 managed-node2 python3.12[59512]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:16 managed-node2 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2532. 
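For context: the role has just written /etc/containers/systemd/quadlet-pod-pod.pod and reloaded systemd, and the quadlet generator turns that file into the transient unit quadlet-pod-pod-pod.service being started here. The exact rendered template is not shown in this log; a minimal sketch of the equivalent manual steps, with assumed file contents based on standard quadlet [Pod] syntax and the pod name recorded below, would be:

    # Sketch only: assumed contents of the .pod quadlet file deployed by the role
    cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
    [Pod]
    PodName=quadlet-pod
    EOF
    systemctl daemon-reload
    # quadlet derives the unit name quadlet-pod-pod-pod.service from quadlet-pod-pod.pod
    systemctl start quadlet-pod-pod-pod.service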
Jul 07 20:15:16 managed-node2 systemd[1]: Created slice machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice - cgroup machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice. ░░ Subject: A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully. ░░ ░░ The job identifier is 2616. Jul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.265732025 -0400 EDT m=+0.061820196 container create 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.272319983 -0400 EDT m=+0.068408122 pod create fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod) Jul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59516]: fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 Jul 07 20:15:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3361] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:15:16 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:15:16 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3447] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jul 07 20:15:16 managed-node2 (udev-worker)[59536]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:15:16 managed-node2 (udev-worker)[59537]: Network interface NamePolicy= disabled on kernel command line. 
Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered forwarding state Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3546] device (veth0): carrier: link connected Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3548] device (podman0): carrier: link connected Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3683] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3701] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3707] device (podman0): Activation: starting connection 'podman0' (845713e4-a167-41fd-b540-293fb4a7aacd) Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3708] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3710] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3712] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3714] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2622. Jul 07 20:15:16 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2622. Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4101] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4105] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4116] device (podman0): Activation: successful, device activated. Jul 07 20:15:16 managed-node2 systemd[1]: Started libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope - libcrun container. ░░ Subject: A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully. ░░ ░░ The job identifier is 2701. 
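Starting the pod's infra container creates the podman0 bridge and a veth pair, which NetworkManager assumes as externally managed devices, as the entries above show. If one wanted to confirm that state by hand on the managed node, something like the following would do (illustrative only, not part of the test):

    # Inspect the bridge device and the pod created for the quadlet unit (illustrative)
    ip -br link show podman0
    nmcli device status
    podman pod ps
    podman ps -a --pod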
Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.467418922 -0400 EDT m=+0.168638968 container init 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.469862017 -0400 EDT m=+0.171082040 container start 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.47596405 -0400 EDT m=+0.177184035 pod start fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod) Jul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59525]: quadlet-pod Jul 07 20:15:16 managed-node2 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2532. Jul 07 20:15:17 managed-node2 python3.12[59737]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 podman[59923]: 2025-07-07 20:15:18.970989919 -0400 EDT m=+0.333232348 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:15:19 managed-node2 python3.12[60094]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
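With the pod unit running and the test image pulled, the role next drops a .container quadlet file into the same directory, as the entries that follow show. A rough sketch of what that file plausibly contains, using only the names and image recorded in this log (the keys are standard quadlet [Container] syntax; the actual template output is not shown here):

    # Sketch only: assumed contents of /etc/containers/systemd/quadlet-pod-container.container
    cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
    [Container]
    ContainerName=quadlet-pod-container
    Image=quay.io/libpod/testimage:20210610
    Pod=quadlet-pod-pod.pod
    EOF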
Jul 07 20:15:19 managed-node2 python3.12[60249]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:20 managed-node2 python3.12[60374]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933719.570603-19951-98699047571118/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:20 managed-node2 python3.12[60529]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:20 managed-node2 systemd[1]: Reload requested from client PID 60530 ('systemctl') (unit session-5.scope)... Jul 07 20:15:20 managed-node2 systemd[1]: Reloading... Jul 07 20:15:20 managed-node2 systemd-rc-local-generator[60574]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:15:20 managed-node2 systemd[1]: Reloading finished in 199 ms. Jul 07 20:15:21 managed-node2 python3.12[60739]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:21 managed-node2 systemd[1]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit quadlet-pod-container.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has begun execution. ░░ ░░ The job identifier is 2708. Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.598559882 -0400 EDT m=+0.046043355 container create 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service) Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.641059859 -0400 EDT m=+0.088543485 container init 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z) Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.643379876 -0400 EDT m=+0.090863506 container start 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:15:21 managed-node2 systemd[1]: Started quadlet-pod-container.service. 
░░ Subject: A start job for unit quadlet-pod-container.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has finished successfully. ░░ ░░ The job identifier is 2708. Jul 07 20:15:21 managed-node2 quadlet-pod-container[60743]: 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.576103282 -0400 EDT m=+0.023586939 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:15:22 managed-node2 python3.12[60912]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:23 managed-node2 python3.12[61068]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:24 managed-node2 python3.12[61224]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:25 managed-node2 python3.12[61388]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:15:25 managed-node2 useradd[61390]: new group: name=user_quadlet_pod, GID=2223 Jul 07 20:15:25 managed-node2 useradd[61390]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0 Jul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ] Jul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ] Jul 07 20:15:26 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
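The verification step above reads back both quadlet files and asks podman which containers the pod holds. Using the Go template shown in the log, the result would typically list the infra container plus the quadlet container (output shown here is illustrative; the log does not include it):

    # Verification command taken from the log; expected output is illustrative
    podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}'
    # -> quadlet-pod-infra quadlet-pod-container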
Jul 07 20:15:27 managed-node2 python3.12[61703]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:28 managed-node2 python3.12[61866]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None Jul 07 20:15:28 managed-node2 python3.12[62022]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:29 managed-node2 python3.12[62180]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:29 managed-node2 python3.12[62336]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:31 managed-node2 python3.12[62492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:32 managed-node2 python3.12[62649]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:32 managed-node2 python3.12[62805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:33 managed-node2 python3.12[62961]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:15:33 managed-node2 systemd[1]: Created slice user-2223.slice - User Slice of UID 2223. ░░ Subject: A start job for unit user-2223.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-2223.slice has finished successfully. ░░ ░░ The job identifier is 2871. Jul 07 20:15:33 managed-node2 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223... ░░ Subject: A start job for unit user-runtime-dir@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has begun execution. ░░ ░░ The job identifier is 2793. Jul 07 20:15:34 managed-node2 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223. ░░ Subject: A start job for unit user-runtime-dir@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has finished successfully. ░░ ░░ The job identifier is 2793. 
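For the rootless variant the role verifies the user's subordinate ID ranges and enables lingering for user_quadlet_pod, so that the per-user systemd instance (user@2223.service, started below) stays up without an interactive login. The equivalent manual steps look roughly like this (illustrative, mirroring the commands recorded above):

    # Check subuid/subgid ranges and enable lingering for the rootless user
    getsubids user_quadlet_pod
    getsubids -g user_quadlet_pod
    loginctl enable-linger user_quadlet_pod
    ls /var/lib/systemd/linger/   # the role uses this path as its "creates" guard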
Jul 07 20:15:34 managed-node2 systemd[1]: Starting user@2223.service - User Manager for UID 2223... ░░ Subject: A start job for unit user@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has begun execution. ░░ ░░ The job identifier is 2873. Jul 07 20:15:34 managed-node2 systemd-logind[659]: New session 7 of user user_quadlet_pod. ░░ Subject: A new session 7 has been created for user user_quadlet_pod ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user user_quadlet_pod. ░░ ░░ The leading process of the session is 62973. Jul 07 20:15:34 managed-node2 (systemd)[62973]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0) Jul 07 20:15:34 managed-node2 systemd[62973]: Queued start job for default target default.target. Jul 07 20:15:34 managed-node2 systemd[62973]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jul 07 20:15:34 managed-node2 systemd[62973]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jul 07 20:15:34 managed-node2 systemd[62973]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jul 07 20:15:34 managed-node2 systemd[62973]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jul 07 20:15:34 managed-node2 systemd[62973]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jul 07 20:15:34 managed-node2 systemd[62973]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jul 07 20:15:34 managed-node2 systemd[62973]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jul 07 20:15:34 managed-node2 systemd[62973]: Startup finished in 74ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 2223 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 74648 microseconds. Jul 07 20:15:34 managed-node2 systemd[1]: Started user@2223.service - User Manager for UID 2223. ░░ Subject: A start job for unit user@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has finished successfully. ░░ ░░ The job identifier is 2873. 
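Once user@2223.service is running, the role talks to that user manager by becoming the user and exporting XDG_RUNTIME_DIR, exactly as the sudo lines below show. Done by hand, the pattern is roughly (illustrative sketch of the same idea):

    # Address the user manager of user_quadlet_pod (mirrors the sudo/XDG_RUNTIME_DIR pattern in the log)
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user daemon-reload
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user start quadlet-pod-pod-pod.service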
Jul 07 20:15:34 managed-node2 python3.12[63135]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:34 managed-node2 python3.12[63290]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:35 managed-node2 python3.12[63415]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933734.666246-20447-234683972003494/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:35 managed-node2 sudo[63620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwdxpsyaqgiwosrsmcnobsvzckxuxltr ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933735.4058456-20475-145399815936558/AnsiballZ_systemd.py' Jul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually Jul 07 20:15:35 managed-node2 systemd[62973]: Reload requested from client PID 63624 ('systemctl')... Jul 07 20:15:35 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:35 managed-node2 systemd[62973]: Reloading finished in 42 ms. Jul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:36 managed-node2 sudo[63839]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofgmoaezuncvsjzylzfbwkllzxyiawqa ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933736.0668013-20497-124358191920012/AnsiballZ_systemd.py' Jul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:36 managed-node2 python3.12[63842]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:36 managed-node2 systemd[62973]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 25. Jul 07 20:15:36 managed-node2 sh[63846]: active Jul 07 20:15:36 managed-node2 systemd[62973]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 25. Jul 07 20:15:36 managed-node2 systemd[62973]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jul 07 20:15:36 managed-node2 systemd[62973]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 26. Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jul 07 20:15:36 managed-node2 systemd[62973]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 26. Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Ready Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 33. Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 32. Jul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63855]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de Jul 07 20:15:36 managed-node2 systemd[62973]: Started podman-pause-569872be.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 36. 
Jul 07 20:15:36 managed-node2 systemd[62973]: Started libpod-31f1b98498c312272d594cc143121f0c4e208b416f5b06370302a3ade84678f0.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 40. Jul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63874]: quadlet-pod Jul 07 20:15:36 managed-node2 systemd[62973]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:37 managed-node2 python3.12[64055]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:38 managed-node2 python3.12[64212]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:38 managed-node2 python3.12[64368]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:39 managed-node2 python3.12[64524]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:15:40 managed-node2 sudo[64729]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opzncrxhlgpablgicvdgsyjydbaatunc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933740.2555504-20657-253835226843587/AnsiballZ_podman_image.py' Jul 07 20:15:40 managed-node2 sudo[64729]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64733.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 45. Jul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64740.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 49. Jul 07 20:15:41 managed-node2 systemd[62973]: Started podman-64765.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 53. 
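The podman-*.scope units above correspond to the rootless image pull performed for user_quadlet_pod; the image lands in that user's own container storage, separate from root's. One way to confirm this by hand would be (illustrative only):

    # List images in the rootless user's storage (illustrative)
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 podman images quay.io/libpod/testimage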
Jul 07 20:15:41 managed-node2 sudo[64729]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:41 managed-node2 python3.12[64927]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:42 managed-node2 python3.12[65082]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:42 managed-node2 python3.12[65207]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933742.1096382-20721-81203128614920/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:43 managed-node2 sudo[65412]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dradgtbmatpvlibzybrbrgncbuvsbmla ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933742.8958044-20756-33448022762234/AnsiballZ_systemd.py' Jul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:43 managed-node2 python3.12[65415]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:43 managed-node2 systemd[62973]: Reload requested from client PID 65416 ('systemctl')... Jul 07 20:15:43 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:43 managed-node2 systemd[62973]: Reloading finished in 50 ms. Jul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:43 managed-node2 sudo[65630]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypfsehmisorrgovabzrcscgtxcipauhh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933743.5809875-20783-151961798381358/AnsiballZ_systemd.py' Jul 07 20:15:43 managed-node2 sudo[65630]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:44 managed-node2 python3.12[65633]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:44 managed-node2 systemd[62973]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 57. Jul 07 20:15:44 managed-node2 systemd[62973]: Started quadlet-pod-container.service. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 57. Jul 07 20:15:44 managed-node2 quadlet-pod-container[65636]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748 Jul 07 20:15:44 managed-node2 sudo[65630]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:44 managed-node2 python3.12[65805]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 python3.12[65961]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 sudo[66167]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkgmctvpahwcnyvziosokhuvkvmstaqp ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933745.1806111-20829-116188117400154/AnsiballZ_command.py' Jul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:45 managed-node2 python3.12[66170]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 systemd[62973]: Started podman-66171.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
Jul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:46 managed-node2 python3.12[66333]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:47 managed-node2 python3.12[66645]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:48 managed-node2 python3.12[66806]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:49 managed-node2 python3.12[66963]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:49 managed-node2 python3.12[67119]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:51 managed-node2 python3.12[67275]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:51 managed-node2 python3.12[67432]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:52 managed-node2 python3.12[67588]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:52 managed-node2 python3.12[67744]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:53 managed-node2 sudo[67951]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qbrxmfhyqxuzmhgzlevjndrxencddmhb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933753.0681589-21169-130280143861349/AnsiballZ_systemd.py' Jul 07 20:15:53 managed-node2 sudo[67951]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:53 managed-node2 python3.12[67954]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:15:53 managed-node2 systemd[62973]: Reload requested from client PID 67957 ('systemctl')... Jul 07 20:15:53 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:53 managed-node2 systemd[62973]: Reloading finished in 49 ms. Jul 07 20:15:53 managed-node2 systemd[62973]: Stopping quadlet-pod-container.service... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 75. 
Jul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: time="2025-07-07T20:16:03-04:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL" Jul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748 Jul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 137. Jul 07 20:16:03 managed-node2 systemd[62973]: Removed slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 76 and the job result is done. Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68000]: quadlet-pod Jul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jul 07 20:16:03 managed-node2 systemd[62973]: Stopped quadlet-pod-container.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 75 and the job result is done. 
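The stop above times out: the test container does not react to SIGTERM, so after podman's default 10-second grace period it is killed and the main process exits with status 137 (128 + 9, i.e. SIGKILL), which the user manager records as a failed unit. A quick way to see that result afterwards (illustrative):

    # 137 = 128 + 9: the container was killed with SIGKILL after the SIGTERM grace period expired
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user --no-pager status quadlet-pod-container.service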
Jul 07 20:16:03 managed-node2 sudo[67951]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68018]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de Jul 07 20:16:04 managed-node2 python3.12[68183]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:05 managed-node2 python3.12[68495]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet systemctl list-unit-files | grep quadlet ls -alrtF /etc/containers/systemd /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:06 managed-node2 python3.12[68705]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:06 managed-node2 python3.12[68861]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Cleanup user] ************************************************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:159 Monday 07 July 2025 20:16:06 -0400 (0:00:00.439) 0:01:01.175 *********** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:16:06 -0400 (0:00:00.071) 0:01:01.247 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:16:06 -0400 (0:00:00.047) 0:01:01.294 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:16:06 -0400 (0:00:00.034) 0:01:01.328 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", 
"skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:16:06 -0400 (0:00:00.067) 0:01:01.396 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:16:06 -0400 (0:00:00.038) 0:01:01.434 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:16:06 -0400 (0:00:00.028) 0:01:01.463 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:16:06 -0400 (0:00:00.029) 0:01:01.492 *********** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:16:07 -0400 (0:00:00.064) 0:01:01.557 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:16:07 -0400 (0:00:00.938) 0:01:02.496 *********** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:16:08 -0400 (0:00:00.030) 0:01:02.526 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:16:08 -0400 (0:00:00.035) 0:01:02.561 *********** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:16:08 -0400 (0:00:00.028) 0:01:02.590 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:01:02.620 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:16:08 -0400 (0:00:00.029) 0:01:02.650 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.022656", "end": "2025-07-07 20:16:08.471681", "rc": 0, "start": "2025-07-07 20:16:08.449025" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:16:08 -0400 (0:00:00.392) 0:01:03.043 *********** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:16:08 -0400 (0:00:00.032) 0:01:03.075 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:16:08 -0400 (0:00:00.028) 0:01:03.103 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:16:08 -0400 (0:00:00.034) 0:01:03.137 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:16:08 -0400 (0:00:00.072) 0:01:03.209 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"5.0\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:16:08 -0400 (0:00:00.051) 0:01:03.261 *********** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:16:08 -0400 (0:00:00.051) 0:01:03.313 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:16:08 -0400 (0:00:00.056) 0:01:03.369 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:16:08 -0400 (0:00:00.032) 0:01:03.402 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:16:08 -0400 (0:00:00.031) 0:01:03.433 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 
20:16:08 -0400 (0:00:00.044) 0:01:03.478 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:16:09 -0400 (0:00:00.381) 0:01:03.859 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003852", "end": "2025-07-07 20:16:09.670116", "rc": 0, "start": "2025-07-07 20:16:09.666264" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:16:09 -0400 (0:00:00.381) 0:01:04.241 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005811", "end": "2025-07-07 20:16:10.055776", "rc": 0, "start": "2025-07-07 20:16:10.049965" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:16:10 -0400 (0:00:00.385) 0:01:04.627 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:16:10 -0400 (0:00:00.046) 0:01:04.673 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:16:10 -0400 (0:00:00.029) 0:01:04.703 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:16:10 -0400 (0:00:00.032) 0:01:04.735 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:16:10 -0400 (0:00:00.029) 0:01:04.765 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:16:10 -0400 (0:00:00.030) 0:01:04.795 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:16:10 -0400 (0:00:00.029) 0:01:04.824 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/home/user_quadlet_pod/.config/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0700", "__podman_parent_path": "/home/user_quadlet_pod/.config/containers", "__podman_policy_json_file": "/home/user_quadlet_pod/.config/containers/policy.json", "__podman_registries_conf_file": "/home/user_quadlet_pod/.config/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/home/user_quadlet_pod/.config/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:16:10 -0400 (0:00:00.040) 0:01:04.865 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:16:10 -0400 (0:00:00.091) 0:01:04.956 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:16:10 -0400 (0:00:00.030) 0:01:04.987 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:16:10 -0400 
(0:00:00.028) 0:01:05.015 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:16:10 -0400 (0:00:00.055) 0:01:05.070 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:16:10 -0400 (0:00:00.029) 0:01:05.099 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:16:10 -0400 (0:00:00.028) 0:01:05.128 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:16:10 -0400 (0:00:00.054) 0:01:05.182 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:16:10 -0400 (0:00:00.027) 0:01:05.210 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:16:10 -0400 (0:00:00.028) 0:01:05.238 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:16:10 -0400 (0:00:00.058) 0:01:05.296 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:16:10 -0400 (0:00:00.029) 0:01:05.326 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:16:10 -0400 (0:00:00.027) 0:01:05.354 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:16:10 -0400 (0:00:00.028) 0:01:05.382 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:16:10 -0400 (0:00:00.028) 0:01:05.410 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:16:10 -0400 (0:00:00.027) 0:01:05.438 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:16:11 -0400 (0:00:00.073) 0:01:05.511 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:16:11 -0400 (0:00:00.029) 0:01:05.541 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:16:11 -0400 (0:00:00.025) 0:01:05.567 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:16:11 -0400 (0:00:00.025) 0:01:05.592 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 
Monday 07 July 2025 20:16:11 -0400 (0:00:00.025) 0:01:05.617 *********** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:16:11 -0400 (0:00:00.024) 0:01:05.641 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:16:11 -0400 (0:00:00.083) 0:01:05.725 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "quadlet-pod-container", "Exec": "/bin/busybox-extras httpd -f -p 80", "Image": "quay.io/libpod/testimage:20210610", "Pod": "quadlet-pod-pod.pod" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:16:11 -0400 (0:00:00.037) 0:01:05.762 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "user_quadlet_pod" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:16:11 -0400 (0:00:00.035) 0:01:05.798 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:16:11 -0400 (0:00:00.029) 0:01:05.827 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-pod-container", "__podman_quadlet_type": "container", "__podman_rootless": true }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:16:11 -0400 (0:00:00.042) 0:01:05.869 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:16:11 -0400 (0:00:00.054) 0:01:05.924 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:16:11 -0400 (0:00:00.032) 0:01:05.956 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:16:11 -0400 (0:00:00.030) 0:01:05.987 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:16:11 -0400 (0:00:00.040) 0:01:06.028 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933380.7065263, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "fa9845e044ad8d1bfcc68a2c8e62c8d83a1bb20e", "ctime": 1751933373.1393917, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 8668983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748217600.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15560, "uid": 0, "version": "1944488044", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:16:11 -0400 (0:00:00.381) 0:01:06.409 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "user_quadlet_pod" ], "delta": "0:00:00.003749", "end": "2025-07-07 20:16:12.218776", "rc": 0, "start": "2025-07-07 20:16:12.215027" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:16:12 -0400 (0:00:00.378) 0:01:06.788 *********** ok: [managed-node2] => { "changed": false, "cmd": [ "getsubids", "-g", "user_quadlet_pod" ], "delta": "0:00:00.005054", "end": "2025-07-07 20:16:12.596303", "rc": 0, "start": "2025-07-07 20:16:12.591249" } STDOUT: 0: user_quadlet_pod 589824 65536 TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:16:12 -0400 (0:00:00.416) 0:01:07.204 *********** ok: [managed-node2] => { "ansible_facts": { "podman_subgid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } }, "podman_subuid_info": { "user_quadlet_pod": { "range": 65536, "start": 589824 } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:16:12 -0400 (0:00:00.045) 0:01:07.249 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:16:12 -0400 (0:00:00.030) 0:01:07.280 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:16:12 -0400 (0:00:00.028) 0:01:07.309 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:16:12 -0400 (0:00:00.029) 0:01:07.339 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:16:12 -0400 (0:00:00.029) 0:01:07.368 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:16:12 -0400 (0:00:00.029) 0:01:07.397 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/libpod/testimage:20210610" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-pod-container.service", "__podman_systemd_scope": "user", "__podman_user_home_dir": "/home/user_quadlet_pod", "__podman_xdg_runtime_dir": "/run/user/2223" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:16:12 -0400 (0:00:00.048) 0:01:07.446 *********** ok: 
[managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/home/user_quadlet_pod/.config/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:16:12 -0400 (0:00:00.030) 0:01:07.476 *********** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:16:13 -0400 (0:00:00.033) 0:01:07.510 *********** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/libpod/testimage:20210610" ], "__podman_quadlet_file": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:16:13 -0400 (0:00:00.069) 0:01:07.580 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:16:13 -0400 (0:00:00.036) 0:01:07.616 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 07 July 2025 20:16:13 -0400 (0:00:00.068) 0:01:07.684 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933733.9971163, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 0, "charset": "binary", "ctime": 1751933763.9446528, "dev": 76, "device_type": 0, "executable": true, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 1, "isblk": false, "ischr": false, "isdir": true, "isfifo": false, "isgid": false, "islnk": false, "isreg": false, "issock": false, "isuid": false, "mimetype": "inode/directory", "mode": "0700", "mtime": 1751933763.9446528, "nlink": 7, "path": "/run/user/2223", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": false, "roth": false, "rusr": true, "size": 160, "uid": 2223, "version": null, "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": true } } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 07 July 2025 20:16:13 -0400 (0:00:00.379) 0:01:08.064 *********** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-pod-container.service", "state": "stopped", "status": { "AccessSELinuxContext": 
"unconfined_u:object_r:user_tmp_t:s0", "ActiveEnterTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ActiveEnterTimestampMonotonic": "761486809", "ActiveExitTimestamp": "Mon 2025-07-07 20:15:53 EDT", "ActiveExitTimestampMonotonic": "770910322", "ActiveState": "failed", "After": "quadlet-pod-pod-pod.service basic.target run-user-2223.mount app.slice podman-user-wait-network-online.service -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Mon 2025-07-07 20:15:44 EDT", "AssertTimestampMonotonic": "761358250", "Before": "shutdown.target default.target", "BindLogSockets": "no", "BindsTo": "quadlet-pod-pod-pod.service", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "291973000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ConditionTimestampMonotonic": "761358245", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "13011", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-pod-container.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3631038464", "EffectiveMemoryMax": "3631038464", "EffectiveTasksMax": "21944", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Mon 2025-07-07 20:16:03 EDT", "ExecMainExitTimestampMonotonic": "781086835", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "65646", "ExecMainStartTimestamp": "Mon 2025-07-07 20:15:44 EDT", "ExecMainStartTimestampMonotonic": "761440001", "ExecMainStatus": "137", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon -d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; ignore_errors=no ; start_time=[Mon 2025-07-07 20:15:44 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=65646 ; code=exited ; status=137 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-pod-container --cidfile=/run/user/2223/quadlet-pod-container.cid --replace --rm --cgroups=split --sdnotify=conmon 
-d --pod-id-file /run/user/2223/quadlet-pod-pod-pod.pod-id quay.io/libpod/testimage:20210610 /bin/busybox-extras httpd -f -p 80 ; flags= ; start_time=[Mon 2025-07-07 20:15:44 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=65646 ; code=exited ; status=137 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=no ; start_time=[Mon 2025-07-07 20:15:53 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=67969 ; code=exited ; status=0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags= ; start_time=[Mon 2025-07-07 20:15:53 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=67969 ; code=exited ; status=0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; ignore_errors=yes ; start_time=[Mon 2025-07-07 20:16:03 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=68002 ; code=exited ; status=0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/user/2223/quadlet-pod-container.cid ; flags=ignore-failure ; start_time=[Mon 2025-07-07 20:16:03 EDT] ; stop_time=[Mon 2025-07-07 20:16:03 EDT] ; pid=68002 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/user/2223/systemd/generator/quadlet-pod-container.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-pod-container.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Mon 2025-07-07 20:16:03 EDT", "InactiveEnterTimestampMonotonic": "781157546", "InactiveExitTimestamp": "Mon 2025-07-07 20:15:44 EDT", "InactiveExitTimestampMonotonic": "761365330", "InvocationID": "408ca3a662f84f11bc69eeaa02ba23d4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "inherit", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13715", "LimitNPROCSoft": "13715", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13715", 
"LimitSIGPENDINGSoft": "13715", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3614703616", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "34390016", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-pod-container.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "200", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "app.slice basic.target", "RequiresMountsFor": "/run/user/2223/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "app.slice", "SourcePath": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", 
"StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Mon 2025-07-07 20:16:03 EDT", "StateChangeTimestampMonotonic": "781157546", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-pod-container", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "21944", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "quadlet-pod-pod-pod.service default.target", "Wants": "podman-user-wait-network-online.service", "WantsMountsFor": "/home/user_quadlet_pod", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0", "WorkingDirectory": "!/home/user_quadlet_pod" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 Monday 07 July 2025 20:16:14 -0400 (0:00:00.670) 0:01:08.735 *********** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1751933743.4072847, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "f0b5c8159fc3c65bf9310a371751609e4c1ba4c3", "ctime": 1751933742.746273, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 2223, "gr_name": "user_quadlet_pod", "inode": 75497741, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751933742.4712682, "nlink": 1, "path": "/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container", "pw_name": "user_quadlet_pod", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 230, "uid": 2223, "version": "446493906", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39 Monday 07 July 2025 20:16:14 -0400 (0:00:00.384) 0:01:09.119 *********** included: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 07 July 2025 20:16:14 -0400 (0:00:00.052) 0:01:09.172 *********** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": 
false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 07 July 2025 20:16:15 -0400 (0:00:00.370) 0:01:09.543 *********** fatal: [managed-node2]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Dump journal] ************************************************************ task path: /tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194 Monday 07 July 2025 20:16:15 -0400 (0:00:00.076) 0:01:09.619 *********** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030213", "end": "2025-07-07 20:16:15.452719", "failed_when_result": true, "rc": 0, "start": "2025-07-07 20:16:15.422506" } STDOUT: Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using transient store: false" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that metacopy is not being used" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Cached value indicated that native-diff is usable" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Initializing event backend file" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI 
runtime youki: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=debug msg="Shutting down engines" Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time="2025-07-07T20:11:58-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=29568 Jul 07 20:11:58 managed-node2 systemd[27434]: Stopped libpod-conmon-82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 85 and the job result is done. Jul 07 20:11:58 managed-node2 systemd[27434]: Removed slice user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice - cgroup user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 84 and the job result is done. 
Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory Jul 07 20:11:58 managed-node2 podman[29546]: Pods stopped: Jul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824 Jul 07 20:11:58 managed-node2 podman[29546]: Pods removed: Jul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824 Jul 07 20:11:58 managed-node2 podman[29546]: Secrets removed: Jul 07 20:11:58 managed-node2 podman[29546]: Volumes removed: Jul 07 20:11:58 managed-node2 systemd[27434]: Created slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 86. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-b68ee141dcf27814664a590d406043e669f2802be350ecb975174d3342b509fb.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 90. Jul 07 20:11:58 managed-node2 systemd[27434]: Started rootless-netns-e8ce431d.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 94. Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:11:58 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:11:58 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:11:58 managed-node2 systemd[27434]: Started run-p29607-i29907.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 98. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-7f0ec0a3585717aa5dc0675fd2d3f2fb67af120ad764f3608d3ed615db7ae81d.scope - libcrun container. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 102. Jul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 107. Jul 07 20:11:58 managed-node2 podman[29546]: Pod: Jul 07 20:11:58 managed-node2 podman[29546]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:11:58 managed-node2 podman[29546]: Container: Jul 07 20:11:58 managed-node2 podman[29546]: d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651 Jul 07 20:11:58 managed-node2 systemd[27434]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. Jul 07 20:11:58 managed-node2 sudo[29540]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:11:59 managed-node2 python3.12[29790]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:11:59 managed-node2 python3.12[29946]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:01 managed-node2 python3.12[30103]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:02 managed-node2 python3.12[30259]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:02 managed-node2 python3.12[30414]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:03 managed-node2 podman[30600]: 2025-07-07 20:12:03.454853091 -0400 EDT m=+0.369862355 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:03 managed-node2 python3.12[30769]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:04 managed-node2 python3.12[30924]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:04 managed-node2 python3.12[31079]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:12:05 managed-node2 python3.12[31204]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933524.4345405-13159-278912456486895/.source.yml _original_basename=.wig720_j follow=False checksum=b4126723a3845d354fb7beda3b3f44919cb02dd7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.511414005 -0400 EDT m=+0.019368086 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice. 
░░ Subject: A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully. ░░ ░░ The job identifier is 1968. Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.55953636 -0400 EDT m=+0.067490307 container create a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.565742112 -0400 EDT m=+0.073696026 pod create 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.592586777 -0400 EDT m=+0.100540709 container create f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.567663828 -0400 EDT m=+0.075617914 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:05 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:12:05 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6188] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6207] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jul 07 20:12:05 managed-node2 (udev-worker)[31379]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:12:05 managed-node2 (udev-worker)[31378]: Network interface NamePolicy= disabled on kernel command line. 
Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6283] device (veth0): carrier: link connected Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6285] device (podman1): carrier: link connected Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6369] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6382] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6387] device (podman1): Activation: starting connection 'podman1' (1839a03b-e916-4e56-ad20-699cf8b9a55a) Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6389] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6402] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6406] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6413] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1974. Jul 07 20:12:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1974. Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7043] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7046] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7051] device (podman1): Activation: successful, device activated. Jul 07 20:12:05 managed-node2 systemd[1]: Started run-p31412-i31712.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p31412-i31712.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p31412-i31712.scope has finished successfully. ░░ ░░ The job identifier is 2053. 
Jul 07 20:12:05 managed-node2 aardvark-dns[31412]: starting aardvark on a child with pid 31419 Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Successfully parsed config Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v6 ip {} Jul 07 20:12:05 managed-node2 aardvark-dns[31419]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53] Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope. ░░ Subject: A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully. ░░ ░░ The job identifier is 2059. Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach} Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : terminal_ctrl_fd: 12 Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : winsz read side: 16, winsz write side: 17 Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope - libcrun container. ░░ Subject: A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully. ░░ ░░ The job identifier is 2066. Jul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : container PID: 31426 Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.796580679 -0400 EDT m=+0.304534648 container init a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.799350929 -0400 EDT m=+0.307304949 container start a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope. ░░ Subject: A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully. ░░ ░░ The job identifier is 2073. 
Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach} Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : terminal_ctrl_fd: 11 Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : winsz read side: 15, winsz write side: 16 Jul 07 20:12:05 managed-node2 systemd[1]: Started libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope - libcrun container. ░░ Subject: A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully. ░░ ░░ The job identifier is 2080. Jul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container PID: 31431 Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.854777496 -0400 EDT m=+0.362731459 container init f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.857558887 -0400 EDT m=+0.365512915 container start f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.863542588 -0400 EDT m=+0.371496538 pod start 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Container: f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:12:05-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-07T20:12:05-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:12:05-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:12:05-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:12:05-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:12:05-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-07T20:12:05-04:00" level=debug msg="Using tmp dir 
/run/libpod" time="2025-07-07T20:12:05-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-07T20:12:05-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:12:05-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-07T20:12:05-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-07T20:12:05-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-07T20:12:05-04:00" level=debug msg="Initializing event backend journald" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:12:05-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:12:05-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:12:05-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:12:05-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:12:05-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name 
libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string" time="2025-07-07T20:12:05-04:00" level=debug msg="using systemd mode: false" time="2025-07-07T20:12:05-04:00" level=debug msg="setting container name 2dbe050d31da-infra" time="2025-07-07T20:12:05-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Allocated lock 1 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are supported" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\" has work directory \"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\" has run directory \"/run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-07-07T20:12:05-04:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-07-07T20:12:05-04:00" level=debug msg="parsed reference into \"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-07-07T20:12:05-04:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-07-07T20:12:05-04:00" level=debug msg="using systemd mode: false" time="2025-07-07T20:12:05-04:00" level=debug msg="adding container to pod httpd2" time="2025-07-07T20:12:05-04:00" level=debug msg="setting container name httpd2-httpd2" 
time="2025-07-07T20:12:05-04:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-07-07T20:12:05-04:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /proc" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev/pts" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /dev/mqueue" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /sys" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-07-07T20:12:05-04:00" level=debug msg="Allocated lock 2 for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" has work directory \"/var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" has run directory \"/run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Strongconnecting node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Pushed a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 onto stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Finishing node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307. Popped a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 off stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Strongconnecting node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="Pushed f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b onto stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Finishing node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b. Popped f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b off stack" time="2025-07-07T20:12:05-04:00" level=debug msg="Made network namespace at /run/netns/netns-1ce7c5df-883b-4dd4-e4c5-5e8b3ad8b1f6 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="Created root filesystem for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24 [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source [INFO netavark::firewall::nft] Creating container chain nv_51bfc940_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "/usr/libexec/podman/aardvark-dns", "--config", "/run/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ce:aa:8c:aa:62:92", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-07-07T20:12:05-04:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-07-07T20:12:05-04:00" level=debug msg="Setting Cgroups for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-07T20:12:05-04:00" level=debug msg="Workdir \"/\" resolved to host path \"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created OCI spec for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/config.json" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as 
machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-07T20:12:05-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -u a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata -p /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/pidfile -n 2dbe050d31da-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307]" time="2025-07-07T20:12:05-04:00" level=info msg="Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope" time="2025-07-07T20:12:05-04:00" level=debug msg="Received: 31426" time="2025-07-07T20:12:05-04:00" level=info msg="Got Conmon PID as 31424" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 in OCI runtime" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-07-07T20:12:05-04:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-07-07T20:12:05-04:00" level=debug msg="Starting container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 with command [/catatonit -P]" time="2025-07-07T20:12:05-04:00" level=debug msg="Started container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307" time="2025-07-07T20:12:05-04:00" level=debug msg="overlay: 
mount_data=lowerdir=/var/lib/containers/storage/overlay/l/66KUKQ3YMIWXUMPTPGDU24SJUU,upperdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/diff,workdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/work,nodev,metacopy=on,context=\"system_u:object_r:container_file_t:s0:c198,c290\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Mounted container \"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\" at \"/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\"" time="2025-07-07T20:12:05-04:00" level=debug msg="Created root filesystem for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged" time="2025-07-07T20:12:05-04:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-07-07T20:12:05-04:00" level=debug msg="Setting Cgroups for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-07-07T20:12:05-04:00" level=debug msg="Workdir \"/var/www\" resolved to a volume or mount" time="2025-07-07T20:12:05-04:00" level=debug msg="Created OCI spec for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/config.json" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b" time="2025-07-07T20:12:05-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice" time="2025-07-07T20:12:05-04:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-07-07T20:12:05-04:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -u f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata -p /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage 
--exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b]" time="2025-07-07T20:12:05-04:00" level=info msg="Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope" time="2025-07-07T20:12:05-04:00" level=debug msg="Received: 31431" time="2025-07-07T20:12:05-04:00" level=info msg="Got Conmon PID as 31429" time="2025-07-07T20:12:05-04:00" level=debug msg="Created container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b in OCI runtime" time="2025-07-07T20:12:05-04:00" level=debug msg="Starting container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b with command [/bin/busybox-extras httpd -f -p 80]" time="2025-07-07T20:12:05-04:00" level=debug msg="Started container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b" time="2025-07-07T20:12:05-04:00" level=debug msg="Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:12:05-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:12:06 managed-node2 python3.12[31587]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:12:06 managed-node2 systemd[1]: Reload requested from client PID 31588 ('systemctl') (unit session-5.scope)... Jul 07 20:12:06 managed-node2 systemd[1]: Reloading... Jul 07 20:12:06 managed-node2 systemd-rc-local-generator[31626]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:06 managed-node2 systemd[1]: Reloading finished in 201 ms. Jul 07 20:12:07 managed-node2 python3.12[31800]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jul 07 20:12:07 managed-node2 systemd[1]: Reload requested from client PID 31803 ('systemctl') (unit session-5.scope)... Jul 07 20:12:07 managed-node2 systemd[1]: Reloading... Jul 07 20:12:07 managed-node2 systemd-rc-local-generator[31851]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:07 managed-node2 systemd[1]: Reloading finished in 212 ms. 
Jul 07 20:12:08 managed-node2 python3.12[32015]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:12:08 managed-node2 systemd[1]: Created slice system-podman\x2dkube.slice - Slice /system/podman-kube. ░░ Subject: A start job for unit system-podman\x2dkube.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit system-podman\x2dkube.slice has finished successfully. ░░ ░░ The job identifier is 2088. Jul 07 20:12:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2087. Jul 07 20:12:08 managed-node2 podman[32019]: 2025-07-07 20:12:08.118637911 -0400 EDT m=+0.022713444 pod stop 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 podman[32019]: time="2025-07-07T20:12:18-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jul 07 20:12:18 managed-node2 systemd[1]: libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state. 
Jul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container 31431 exited with status 137 Jul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope/container/memory.events Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.143617522 -0400 EDT m=+10.047693144 container died f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Using sqlite as database backend" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph driver overlay" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using run root /run/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using transient store: false" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 07 20:12:18 
managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Initializing event backend journald" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount has successfully entered the 'dead' state. 
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.191389905 -0400 EDT m=+10.095465414 container cleanup f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time="2025-07-07T20:12:18-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=32031 Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state. 
Jul 07 20:12:18 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope/container/memory.events Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.211877781 -0400 EDT m=+10.115953439 container died a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)" Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Using sqlite as database backend" Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Received SIGHUP Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Successfully parsed config Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v4 ip {} Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v6 ip {} Jul 07 20:12:18 managed-node2 aardvark-dns[31419]: No configuration found stopping the sever Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph driver overlay" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using graph root /var/lib/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using run root /run/containers/storage" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using tmp dir /run/libpod" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using transient store: false" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: 
time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that overlay is supported" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that metacopy is being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Cached value indicated that native-diff is not being used" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Initializing event backend journald" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jul 07 20:12:18 managed-node2 systemd[1]: run-p31412-i31712.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p31412-i31712.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Setting parallel job count to 7" Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.2554] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:12:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2172. Jul 07 20:12:18 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2172. Jul 07 20:12:18 managed-node2 systemd[1]: run-netns-netns\x2d1ce7c5df\x2d883b\x2d4dd4\x2de4c5\x2d5e8b3ad8b1f6.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d1ce7c5df\x2d883b\x2d4dd4\x2de4c5\x2d5e8b3ad8b1f6.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.329738431 -0400 EDT m=+10.233814039 container cleanup a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=debug msg="Shutting down engines" Jul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time="2025-07-07T20:12:18-04:00" level=info msg="Received shutdown.Stop(), terminating!" PID=32043 Jul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state. Jul 07 20:12:18 managed-node2 systemd[1]: Stopped libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope. ░░ Subject: A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished. ░░ ░░ The job identifier is 2252 and the job result is done. Jul 07 20:12:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice. ░░ Subject: A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished. ░░ ░░ The job identifier is 2251 and the job result is done. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.395138747 -0400 EDT m=+10.299214275 container remove f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.421655838 -0400 EDT m=+10.325731365 container remove a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b) Jul 07 20:12:18 managed-node2 systemd[1]: machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: No such file or directory Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430187452 -0400 EDT m=+10.334262942 pod remove 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: Pods stopped: Jul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Jul 07 20:12:18 managed-node2 podman[32019]: Pods removed: Jul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b Jul 07 20:12:18 managed-node2 podman[32019]: Secrets removed: Jul 07 20:12:18 managed-node2 podman[32019]: Volumes removed: Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430564473 -0400 EDT m=+10.334640043 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.451338654 -0400 EDT m=+10.355414196 container create 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice. ░░ Subject: A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully. ░░ ░░ The job identifier is 2253. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.492350688 -0400 EDT m=+10.396426207 container create 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.498774654 -0400 EDT m=+10.402850152 pod create 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.52421506 -0400 EDT m=+10.428290660 container create 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.524550628 -0400 EDT m=+10.428626163 container restart 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope - libcrun container. ░░ Subject: A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully. ░░ ░░ The job identifier is 2259. 
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.500507692 -0400 EDT m=+10.404583357 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.584365043 -0400 EDT m=+10.488440589 container init 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.587275237 -0400 EDT m=+10.491350731 container start 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:12:18 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:12:18 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jul 07 20:12:18 managed-node2 (udev-worker)[32052]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:18 managed-node2 (udev-worker)[32051]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6102] device (podman1): carrier: link connected Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6105] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6153] device (veth0): carrier: link connected Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6156] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6318] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6330] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6372] device (podman1): Activation: starting connection 'podman1' (d4ed169d-27e3-42b9-8610-eac77be55153) Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6374] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6376] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6377] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6380] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6494] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6511] device (podman1): state change: secondaries -> activated (reason 
'none', managed-type: 'external') Jul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6517] device (podman1): Activation: successful, device activated. Jul 07 20:12:18 managed-node2 systemd[1]: Started run-p32110-i32410.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p32110-i32410.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p32110-i32410.scope has finished successfully. ░░ ░░ The job identifier is 2265. Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope - libcrun container. ░░ Subject: A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully. ░░ ░░ The job identifier is 2271. Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.743416519 -0400 EDT m=+10.647492104 container init 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.745567921 -0400 EDT m=+10.649643561 container start 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:12:18 managed-node2 systemd[1]: Started libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope - libcrun container. ░░ Subject: A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully. ░░ ░░ The job identifier is 2278. 
Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.78342779 -0400 EDT m=+10.687503327 container init 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.786203333 -0400 EDT m=+10.690278905 container start 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.792190652 -0400 EDT m=+10.696266270 pod start 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:12:18 managed-node2 podman[32019]: Pod: Jul 07 20:12:18 managed-node2 podman[32019]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:12:18 managed-node2 podman[32019]: Container: Jul 07 20:12:18 managed-node2 podman[32019]: 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd Jul 07 20:12:18 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished successfully. ░░ ░░ The job identifier is 2087. Jul 07 20:12:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount has successfully entered the 'dead' state. 
Jul 07 20:12:19 managed-node2 python3.12[32279]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:20 managed-node2 python3.12[32436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:21 managed-node2 python3.12[32592]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:22 managed-node2 python3.12[32747]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:23 managed-node2 podman[32933]: 2025-07-07 20:12:23.116451069 -0400 EDT m=+0.400655980 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:23 managed-node2 python3.12[33102]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:23 managed-node2 python3.12[33257]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:24 managed-node2 python3.12[33412]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:12:24 managed-node2 python3.12[33537]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933544.127541-13777-126359990981212/.source.yml _original_basename=.mz8q_k1v follow=False checksum=c8e8f54a2e9107a24008cfb6f1d2d59b89d86a42 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:25 managed-node2 python3.12[33692]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.227950329 -0400 EDT m=+0.015681172 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice. ░░ Subject: A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully. ░░ ░░ The job identifier is 2285. Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.26795522 -0400 EDT m=+0.055686154 container create 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.274184949 -0400 EDT m=+0.061915786 pod create 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.300940456 -0400 EDT m=+0.088671322 container create bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:25 managed-node2 kernel: veth1: entered allmulticast mode Jul 07 20:12:25 managed-node2 kernel: veth1: entered promiscuous mode Jul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3256] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7) Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.275812121 -0400 EDT m=+0.063543116 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:25 managed-node2 (udev-worker)[33710]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3293] device (veth1): carrier: link connected Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope. ░░ Subject: A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully. ░░ ░░ The job identifier is 2291. 
Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope - libcrun container. ░░ Subject: A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully. ░░ ░░ The job identifier is 2298. Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.421199278 -0400 EDT m=+0.208930271 container init 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.423916963 -0400 EDT m=+0.211647873 container start 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope. ░░ Subject: A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully. ░░ ░░ The job identifier is 2305. Jul 07 20:12:25 managed-node2 systemd[1]: Started libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope - libcrun container. ░░ Subject: A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully. ░░ ░░ The job identifier is 2312. 
Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.47967795 -0400 EDT m=+0.267408864 container init bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.482134977 -0400 EDT m=+0.269865956 container start bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.48810857 -0400 EDT m=+0.275839437 pod start 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:26 managed-node2 python3.12[33903]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 33904 ('systemctl') (unit session-5.scope)... Jul 07 20:12:26 managed-node2 systemd[1]: Reloading... Jul 07 20:12:26 managed-node2 systemd-rc-local-generator[33949]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:26 managed-node2 systemd[1]: Reloading finished in 216 ms. Jul 07 20:12:26 managed-node2 python3.12[34116]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None Jul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 34119 ('systemctl') (unit session-5.scope)... Jul 07 20:12:26 managed-node2 systemd[1]: Reloading... Jul 07 20:12:27 managed-node2 systemd-rc-local-generator[34169]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:27 managed-node2 systemd[1]: Reloading finished in 222 ms. Jul 07 20:12:27 managed-node2 python3.12[34331]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:12:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2319. Jul 07 20:12:27 managed-node2 podman[34335]: 2025-07-07 20:12:27.799634553 -0400 EDT m=+0.024842125 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:28 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: time="2025-07-07T20:12:37-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jul 07 20:12:37 managed-node2 systemd[1]: libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.830033292 -0400 EDT m=+10.055241268 container died bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.871361891 -0400 EDT m=+10.096569436 container cleanup bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.891562873 -0400 EDT m=+10.116770720 container died 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra) Jul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:37 managed-node2 systemd[1]: run-netns-netns\x2d9f683135\x2dcb1d\x2d6825\x2d135b\x2df344c71f6412.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d9f683135\x2dcb1d\x2d6825\x2d135b\x2df344c71f6412.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.960143035 -0400 EDT m=+10.185350606 container cleanup 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:37 managed-node2 systemd[1]: Stopping libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope... ░░ Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution. ░░ ░░ The job identifier is 2405. Jul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state. Jul 07 20:12:37 managed-node2 systemd[1]: Stopped libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope. ░░ Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished. ░░ ░░ The job identifier is 2405 and the job result is done. Jul 07 20:12:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice. ░░ Subject: A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished. 
░░ ░░ The job identifier is 2404 and the job result is done. Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.968733882 -0400 EDT m=+10.193941424 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.973070119 -0400 EDT m=+10.198277748 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.999456374 -0400 EDT m=+10.224663949 container remove bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.025619131 -0400 EDT m=+10.250826702 container remove 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94) Jul 07 20:12:38 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034330595 -0400 EDT m=+10.259538132 pod remove 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: Pods stopped: Jul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 Jul 07 20:12:38 managed-node2 podman[34335]: Pods removed: Jul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 Jul 07 20:12:38 managed-node2 podman[34335]: Secrets removed: Jul 07 20:12:38 managed-node2 podman[34335]: Volumes removed: Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034440895 -0400 EDT m=+10.259648519 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.055160997 -0400 EDT m=+10.280368656 container create f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice. 
░░ Subject: A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully. ░░ ░░ The job identifier is 2406. Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.089857565 -0400 EDT m=+10.315065108 container create afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.096374067 -0400 EDT m=+10.321581608 pod create f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.121708454 -0400 EDT m=+10.346916101 container create 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.122052483 -0400 EDT m=+10.347260058 container restart f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope - libcrun container. ░░ Subject: A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully. ░░ ░░ The job identifier is 2412. 
Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.09796996 -0400 EDT m=+10.323177689 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.161037652 -0400 EDT m=+10.386245273 container init f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.163287704 -0400 EDT m=+10.388495346 container start f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:12:38 managed-node2 kernel: veth1: entered allmulticast mode Jul 07 20:12:38 managed-node2 kernel: veth1: entered promiscuous mode Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state Jul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state Jul 07 20:12:38 managed-node2 (udev-worker)[34367]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1845] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1884] device (veth1): carrier: link connected Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope - libcrun container. ░░ Subject: A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully. ░░ ░░ The job identifier is 2418. Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.256569895 -0400 EDT m=+10.481777617 container init afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.259020833 -0400 EDT m=+10.484228554 container start afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:12:38 managed-node2 systemd[1]: Started libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope - libcrun container. ░░ Subject: A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully. ░░ ░░ The job identifier is 2425. 
Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.305507767 -0400 EDT m=+10.530715363 container init 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.307690208 -0400 EDT m=+10.532897848 container start 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.313601662 -0400 EDT m=+10.538809232 pod start f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:12:38 managed-node2 podman[34335]: Pod: Jul 07 20:12:38 managed-node2 podman[34335]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:12:38 managed-node2 podman[34335]: Container: Jul 07 20:12:38 managed-node2 podman[34335]: 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e Jul 07 20:12:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished successfully. ░░ ░░ The job identifier is 2319. Jul 07 20:12:39 managed-node2 sudo[34620]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxlwmudvcyywtlojiblkxiobcxywcxdd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933558.7922473-14198-135957250003670/AnsiballZ_command.py' Jul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:12:39 managed-node2 python3.12[34623]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:39 managed-node2 systemd[27434]: Started podman-34631.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 112. 
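With the httpd3 pod started, the log moves into the test's verification pass: each pod's state is read with podman pod inspect --format '{{.State}}', the matching podman-kube@ unit is confirmed loaded and active via systemctl list-units piped through grep, and the published ports 15001-15003 are fetched over HTTP. A minimal sketch of those same checks as standalone Ansible tasks follows; the task names and the expected "Running" state string are illustrative assumptions, while the commands, unit pattern, and URL are taken from the invocations shown in this log.

    # Sketch only: mirrors the verification commands visible in the log above.
    # The !unsafe tag keeps the Go template '{{.State}}' from being treated as Jinja2.
    - name: Check that the httpd3 pod reports Running   # task name and expected string are assumptions
      ansible.builtin.command: !unsafe podman pod inspect httpd3 --format '{{.State}}'
      register: httpd3_state
      changed_when: false
      failed_when: "'Running' not in httpd3_state.stdout"

    - name: Confirm the system-scope kube unit for httpd3 is loaded and active
      ansible.builtin.shell: >-
        set -euo pipefail;
        systemctl --system list-units -a -l --plain
        | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '
      changed_when: false

    - name: Fetch the published index.txt to confirm the web server answers
      ansible.builtin.uri:
        url: http://localhost:15003/index.txt
        return_content: true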
Jul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:12:39 managed-node2 python3.12[34794]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 python3.12[34957]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 sudo[35170]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-edeafswnzssmkzeqkidxujvilmmhsinz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933560.1733143-14258-164223741543922/AnsiballZ_command.py' Jul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:12:40 managed-node2 python3.12[35173]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:12:40 managed-node2 python3.12[35331]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:41 managed-node2 python3.12[35489]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:42 managed-node2 python3.12[35647]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:42 managed-node2 python3.12[35804]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] 
timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:43 managed-node2 python3.12[35960]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:43 managed-node2 python3.12[36116]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:43 managed-node2 python3.12[36272]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:46 managed-node2 python3.12[36583]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:47 managed-node2 python3.12[36744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:50 managed-node2 python3.12[36901]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:51 managed-node2 python3.12[37057]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jul 07 20:12:51 managed-node2 python3.12[37214]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jul 07 20:12:52 managed-node2 python3.12[37371]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jul 07 20:12:54 managed-node2 python3.12[37526]: ansible-ansible.legacy.dnf Invoked with 
name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:55 managed-node2 python3.12[37683]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:55 managed-node2 python3.12[37839]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jul 07 20:12:56 managed-node2 python3.12[37996]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jul 07 20:12:58 managed-node2 python3.12[38193]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jul 07 20:12:59 managed-node2 python3.12[38348]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jul 07 20:13:03 managed-node2 python3.12[38503]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:13:03 managed-node2 python3.12[38659]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:04 managed-node2 python3.12[38817]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:04 managed-node2 python3.12[38973]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:05 managed-node2 python3.12[39129]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False 
expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:06 managed-node2 python3.12[39285]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:13:07 managed-node2 python3.12[39440]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:07 managed-node2 python3.12[39595]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:07 managed-node2 sudo[39800]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sggfuwotsbnqelqoxqdpnxcztrgpfxov ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933587.546744-15228-221616196309386/AnsiballZ_podman_image.py' Jul 07 20:13:07 managed-node2 sudo[39800]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:07 managed-node2 systemd[27434]: Started podman-39804.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 116. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39811.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 120. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39818.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 124. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39825.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 128. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39832.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 132. Jul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39839.scope. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 136. Jul 07 20:13:08 managed-node2 sudo[39800]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:08 managed-node2 python3.12[40000]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:09 managed-node2 python3.12[40157]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:09 managed-node2 python3.12[40312]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:10 managed-node2 python3.12[40390]: ansible-ansible.legacy.file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=.g46gggh2 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:10 managed-node2 sudo[40595]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwtednmiooezcolvhoaprqmyaqljtcnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933590.280253-15331-51302432010459/AnsiballZ_podman_play.py' Jul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:10 managed-node2 systemd[27434]: Started podman-40605.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 140. Jul 07 20:13:10 managed-node2 systemd[27434]: Created slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 144. Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:13:10-04:00" level=info msg="/bin/podman filtering at log level debug" time="2025-07-07T20:13:10-04:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-07-07T20:13:10-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:13:10-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:13:10-04:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-07-07T20:13:10-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:13:10-04:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-07-07T20:13:10-04:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-07-07T20:13:10-04:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-07-07T20:13:10-04:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-07-07T20:13:10-04:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-07-07T20:13:10-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:13:10-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-07-07T20:13:10-04:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-07-07T20:13:10-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-07-07T20:13:10-04:00" level=debug msg="Initializing event backend file" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" 
time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:13:10-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:13:10-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:13:10-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 8ffa2dc6ff76aec6ab19c0699b0508615d79e3b6d14dd7cd78be0f62c7718f3e bridge podman1 2025-07-07 20:11:45.408967969 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:13:10-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:13:10-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:13:10-04:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice for parent user.slice and name libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d" time="2025-07-07T20:13:10-04:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice" time="2025-07-07T20:13:10-04:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice" Error: adding pod to state: name "httpd1" is in use: pod already exists time="2025-07-07T20:13:10-04:00" level=debug msg="Shutting down engines" time="2025-07-07T20:13:10-04:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=40605 Jul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:11 managed-node2 python3.12[40767]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:13:12 managed-node2 python3.12[40923]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:13 managed-node2 python3.12[41080]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:14 managed-node2 python3.12[41236]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:14 managed-node2 python3.12[41391]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:15 managed-node2 podman[41576]: 2025-07-07 20:13:15.556653463 -0400 EDT m=+0.319976011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:13:15 managed-node2 python3.12[41746]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:16 managed-node2 python3.12[41903]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:16 managed-node2 python3.12[42058]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:17 managed-node2 python3.12[42136]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=.cnp2b107 recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play 
Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:17 managed-node2 podman[42298]: 2025-07-07 20:13:17.610100559 -0400 EDT m=+0.016705745 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:13:17 managed-node2 systemd[1]: Created slice machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice - cgroup machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice. ░░ Subject: A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully. ░░ ░░ The job identifier is 2432. Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-07-07T20:13:17-04:00" level=info msg="/usr/bin/podman filtering at log level debug" time="2025-07-07T20:13:17-04:00" level=debug msg="Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)" time="2025-07-07T20:13:17-04:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-07-07T20:13:17-04:00" level=info msg="Using sqlite as database backend" time="2025-07-07T20:13:17-04:00" level=debug msg="Using graph driver overlay" time="2025-07-07T20:13:17-04:00" level=debug msg="Using graph root /var/lib/containers/storage" time="2025-07-07T20:13:17-04:00" level=debug msg="Using run root /run/containers/storage" time="2025-07-07T20:13:17-04:00" level=debug msg="Using static dir /var/lib/containers/storage/libpod" time="2025-07-07T20:13:17-04:00" level=debug msg="Using tmp dir /run/libpod" time="2025-07-07T20:13:17-04:00" level=debug msg="Using volume path /var/lib/containers/storage/volumes" time="2025-07-07T20:13:17-04:00" level=debug msg="Using transient store: false" time="2025-07-07T20:13:17-04:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that metacopy is being used" time="2025-07-07T20:13:17-04:00" level=debug msg="Cached value indicated that native-diff is not being used" time="2025-07-07T20:13:17-04:00" level=info msg="Not using native diff for overlay, this may cause degraded performance for building images: kernel has 
CONFIG_OVERLAY_FS_REDIRECT_DIR enabled" time="2025-07-07T20:13:17-04:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true" time="2025-07-07T20:13:17-04:00" level=debug msg="Initializing event backend journald" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-07-07T20:13:17-04:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-07-07T20:13:17-04:00" level=info msg="Setting parallel job count to 7" time="2025-07-07T20:13:17-04:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-07-07T20:13:17-04:00" level=debug msg="Successfully loaded 2 networks" time="2025-07-07T20:13:17-04:00" level=debug msg="Pod using bridge network mode" time="2025-07-07T20:13:17-04:00" level=debug msg="Created cgroup path machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice for parent machine.slice and name libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9" time="2025-07-07T20:13:17-04:00" level=debug msg="Created cgroup machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice" time="2025-07-07T20:13:17-04:00" level=debug msg="Got pod cgroup as machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice" Error: adding pod to state: name "httpd2" is in use: pod already exists time="2025-07-07T20:13:17-04:00" level=debug msg="Shutting down engines" time="2025-07-07T20:13:17-04:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=42298 Jul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jul 07 20:13:18 managed-node2 python3.12[42459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:20 managed-node2 python3.12[42616]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:20 managed-node2 python3.12[42772]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:21 managed-node2 python3.12[42927]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:22 managed-node2 podman[43112]: 2025-07-07 20:13:22.329599025 -0400 EDT m=+0.443555601 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:13:22 managed-node2 python3.12[43281]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:23 managed-node2 python3.12[43438]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:23 managed-node2 python3.12[43593]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:24 managed-node2 python3.12[43671]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=.garaxq8q recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:24 managed-node2 python3.12[43826]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None 
password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:24 managed-node2 podman[43834]: 2025-07-07 20:13:24.584154544 -0400 EDT m=+0.014632770 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge) Jul 07 20:13:24 managed-node2 systemd[1]: Created slice machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice - cgroup machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice. ░░ Subject: A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully. ░░ ░░ The job identifier is 2438. Jul 07 20:13:25 managed-node2 sudo[44045]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fkixybyzrrzuqcjcfmgozxwfwmajookd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933605.3310475-16168-26421251595513/AnsiballZ_command.py' Jul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:25 managed-node2 python3.12[44048]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:25 managed-node2 systemd[27434]: Started podman-44056.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 148. 
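The PODMAN-PLAY-KUBE attempts earlier in this log end with rc 125 and the message 'name "httpd1"/"httpd2" is in use: pod already exists' because the kube files are being re-played while their pods are still running; the module records podman's exit code rather than recreating anything. The logged invocations include a recreate parameter (left unset here); a minimal sketch of forcing replacement on such a re-run, assuming recreate behaves as its name suggests, would be:

    # Sketch only: same module, file, and state as in the log, with recreate enabled.
    # Whether replacement is the desired behavior for this role is an assumption, not taken from the log.
    - name: Re-play the kube file, replacing the existing httpd2 pod
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: started
        recreate: true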
Jul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:26 managed-node2 python3.12[44220]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:26 managed-node2 python3.12[44383]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:26 managed-node2 sudo[44596]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ydnvtlhbpgwizyiplpnqjwanppjzhlbg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933606.7243044-16231-16521388663911/AnsiballZ_command.py' Jul 07 20:13:26 managed-node2 sudo[44596]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:27 managed-node2 python3.12[44599]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:27 managed-node2 sudo[44596]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:27 managed-node2 python3.12[44757]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:27 managed-node2 python3.12[44915]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:28 managed-node2 python3.12[45073]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:28 managed-node2 python3.12[45229]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] 
timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:29 managed-node2 python3.12[45385]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:31 managed-node2 python3.12[45696]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:32 managed-node2 python3.12[45857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:36 managed-node2 python3.12[46014]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:13:36 managed-node2 python3.12[46170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:37 managed-node2 python3.12[46327]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:37 managed-node2 python3.12[46483]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:38 managed-node2 python3.12[46639]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:39 managed-node2 python3.12[46795]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:39 managed-node2 sudo[47002]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cdbeefpvsgoajxqmxzaiihkscmhhxllh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933619.5337389-16868-45510891100291/AnsiballZ_systemd.py' Jul 07 20:13:39 managed-node2 sudo[47002]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 
20:13:40 managed-node2 python3.12[47005]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:13:40 managed-node2 systemd[27434]: Reload requested from client PID 47008 ('systemctl')... Jul 07 20:13:40 managed-node2 systemd[27434]: Reloading... Jul 07 20:13:40 managed-node2 systemd[27434]: Reloading finished in 62 ms. Jul 07 20:13:40 managed-node2 systemd[27434]: Stopping podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 152. Jul 07 20:13:50 managed-node2 podman[47019]: time="2025-07-07T20:13:50-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL" Jul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:13:50 managed-node2 systemd[27434]: Removed slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 153 and the job result is done. Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory Jul 07 20:13:50 managed-node2 podman[47019]: Pods stopped: Jul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:13:50 managed-node2 podman[47019]: Pods removed: Jul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6 Jul 07 20:13:50 managed-node2 podman[47019]: Secrets removed: Jul 07 20:13:50 managed-node2 podman[47019]: Volumes removed: Jul 07 20:13:50 managed-node2 systemd[27434]: Stopped podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play. 
░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 152 and the job result is done. Jul 07 20:13:50 managed-node2 systemd[27434]: podman-kube@-home-podman_basic_user-.config-containers-ansible\x2dkubernetes.d-httpd1.yml.service: Consumed 589ms CPU time, 74.3M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:13:50 managed-node2 sudo[47002]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:50 managed-node2 python3.12[47222]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:51 managed-node2 sudo[47429]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ragjadshxwcsmhyabehcrpjvrodecwop ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933631.1425395-17190-192702739809308/AnsiballZ_podman_play.py' Jul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play version: 5.5.1, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:51 managed-node2 systemd[27434]: Started podman-47439.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 154. 
Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:13:52 managed-node2 python3.12[47600]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:53 managed-node2 python3.12[47755]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:13:53 managed-node2 python3.12[47911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:54 managed-node2 python3.12[48068]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:55 managed-node2 python3.12[48224]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:13:55 managed-node2 systemd[1]: Reload requested from client PID 48227 ('systemctl') (unit session-5.scope)... Jul 07 20:13:55 managed-node2 systemd[1]: Reloading... Jul 07 20:13:55 managed-node2 systemd-rc-local-generator[48270]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:13:55 managed-node2 systemd[1]: Reloading finished in 218 ms. Jul 07 20:13:55 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has begun execution. ░░ ░░ The job identifier is 2445. 
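The entries around here show the teardown path used for each kube workload: stop and disable the templated podman-kube@ unit, run podman kube play --down through podman_play with state=absent, then delete the kube YAML itself (the file removal is shown above for the rootless httpd1 workload). A compact sketch of that sequence for the system-scope httpd2 workload, built only from module names and parameters that appear in this log, with illustrative task names:

    - name: Stop and disable the templated kube unit for httpd2
      ansible.builtin.systemd:
        name: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service
        scope: system
        state: stopped
        enabled: false

    - name: Tear down the pod defined by the kube file (podman kube play --down)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: absent

    - name: Remove the kube file itself   # mirrors the httpd1 cleanup shown above
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/httpd2.yml
        state: absent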
Jul 07 20:13:56 managed-node2 podman[48286]: 2025-07-07 20:13:56.040685373 -0400 EDT m=+0.023115658 pod stop 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:14:06 managed-node2 podman[48286]: time="2025-07-07T20:14:06-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" Jul 07 20:14:06 managed-node2 systemd[1]: libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.074195184 -0400 EDT m=+10.056625730 container died 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.11235352 -0400 EDT m=+10.094783772 container cleanup 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:14:06 managed-node2 systemd[1]: libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.131606724 -0400 EDT m=+10.114044140 container died 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jul 07 20:14:06 managed-node2 systemd[1]: run-netns-netns\x2d82249bc3\x2db382\x2d7b9a\x2d81bc\x2d86e8308d188a.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d82249bc3\x2db382\x2d7b9a\x2d81bc\x2d86e8308d188a.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.202757575 -0400 EDT m=+10.185187931 container cleanup 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: Removed slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice. ░░ Subject: A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished. ░░ ░░ The job identifier is 2446 and the job result is done. 
Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.23022593 -0400 EDT m=+10.212656217 container remove 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.25693135 -0400 EDT m=+10.239361641 container remove 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: Failed to open /run/systemd/transient/machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: No such file or directory Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.265332892 -0400 EDT m=+10.247763153 pod remove 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2) Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.268022285 -0400 EDT m=+10.250452816 container kill 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.274842916 -0400 EDT m=+10.257273277 container died 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount has successfully entered the 'dead' state. 
Jul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.336039419 -0400 EDT m=+10.318469707 container remove 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service) Jul 07 20:14:06 managed-node2 podman[48286]: Pods stopped: Jul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:14:06 managed-node2 podman[48286]: Pods removed: Jul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 Jul 07 20:14:06 managed-node2 podman[48286]: Secrets removed: Jul 07 20:14:06 managed-node2 podman[48286]: Volumes removed: Jul 07 20:14:06 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state. Jul 07 20:14:06 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service has finished. ░░ ░░ The job identifier is 2445 and the job result is done. Jul 07 20:14:06 managed-node2 python3.12[48488]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount has successfully entered the 'dead' state. 
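The "StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL" warning earlier in this stop sequence is podman's default 10-second stop timeout expiring, presumably because the test image's process does not exit on SIGTERM. Nothing in this run overrides that timeout; as an illustrative sketch only, a shorter grace period could be requested when stopping a pod by hand with the standard -t/--time flag:

    # wait only 2 seconds for SIGTERM before escalating to SIGKILL (illustrative value)
    podman pod stop --time 2 httpd2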
Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped: Pods removed: Secrets removed: Volumes removed: Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: Jul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0 Jul 07 20:14:07 managed-node2 python3.12[48813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:08 managed-node2 python3.12[48969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:10 managed-node2 python3.12[49126]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:10 managed-node2 python3.12[49282]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:14:10 managed-node2 systemd[1]: Reload requested from client PID 49285 ('systemctl') (unit session-5.scope)... Jul 07 20:14:10 managed-node2 systemd[1]: Reloading... Jul 07 20:14:11 managed-node2 systemd-rc-local-generator[49327]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:14:11 managed-node2 systemd[1]: Reloading finished in 211 ms. Jul 07 20:14:11 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play... ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has begun execution. ░░ ░░ The job identifier is 2447. 
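Each root teardown first resolves the templated unit name with systemd-escape, which turns the kube file path into the escaped instance name that appears throughout this journal. A sketch using the httpd3 path from the log:

    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service
    systemctl stop 'podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service'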
Jul 07 20:14:11 managed-node2 podman[49344]: 2025-07-07 20:14:11.206854573 -0400 EDT m=+0.022831781 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 podman[49344]: time="2025-07-07T20:14:21-04:00" level=warning msg="StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL" Jul 07 20:14:21 managed-node2 systemd[1]: libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.240228718 -0400 EDT m=+10.056206048 container died 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test) Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.280265572 -0400 EDT m=+10.096242780 container cleanup 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:14:21 managed-node2 systemd[1]: libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.298799022 -0400 EDT m=+10.114776654 container died afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: run-p32110-i32410.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p32110-i32410.scope has successfully entered the 'dead' state. 
Jul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state Jul 07 20:14:21 managed-node2 NetworkManager[714]: [1751933661.3375] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2448. Jul 07 20:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2448. Jul 07 20:14:21 managed-node2 systemd[1]: run-netns-netns\x2dd8d23001\x2dccdd\x2d98d5\x2d0185\x2d01ce80e8c916.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd8d23001\x2dccdd\x2d98d5\x2d0185\x2d01ce80e8c916.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.414963852 -0400 EDT m=+10.230941061 container cleanup afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: Removed slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice. ░░ Subject: A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished. ░░ ░░ The job identifier is 2527 and the job result is done. 
Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.422214439 -0400 EDT m=+10.238191678 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.428641853 -0400 EDT m=+10.244619177 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.436145908 -0400 EDT m=+10.252123291 container kill f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.451967497 -0400 EDT m=+10.267945068 container died f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.478053211 -0400 EDT m=+10.294030464 container remove 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.504395032 -0400 EDT m=+10.320372288 container remove afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.513441882 -0400 EDT m=+10.329419091 pod remove f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3) Jul 07 20:14:21 managed-node2 podman[49397]: 2025-07-07 20:14:21.533507126 -0400 EDT m=+0.087614608 container cleanup 
f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.560230359 -0400 EDT m=+10.376207602 container remove f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service) Jul 07 20:14:21 managed-node2 podman[49344]: Pods stopped: Jul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:14:21 managed-node2 podman[49344]: Pods removed: Jul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f Jul 07 20:14:21 managed-node2 podman[49344]: Secrets removed: Jul 07 20:14:21 managed-node2 podman[49344]: Volumes removed: Jul 07 20:14:21 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state. Jul 07 20:14:21 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play. ░░ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service has finished. ░░ ░░ The job identifier is 2447 and the job result is done. Jul 07 20:14:21 managed-node2 python3.12[49562]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount has successfully entered the 'dead' state. 
Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml Jul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:22 managed-node2 python3.12[49888]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:23 managed-node2 python3.12[50043]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 07 20:14:24 managed-node2 python3.12[50199]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:24 managed-node2 sudo[50407]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpalnwprgspnzsnsvsnylrerubrespev ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.3660543-18219-69893342173436/AnsiballZ_podman_container_info.py' Jul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:24 managed-node2 python3.12[50410]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None Jul 07 20:14:24 managed-node2 systemd[27434]: Started podman-50411.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 158. 
Jul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:25 managed-node2 sudo[50623]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lygojmlyopjnwoxfpskxczdponstloyj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.983715-18245-114379203266243/AnsiballZ_command.py' Jul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:25 managed-node2 python3.12[50626]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50627.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 162. Jul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:25 managed-node2 sudo[50839]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iwlmlwzhyduyhuxfusqoosixqzgzqvsl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933665.559254-18262-213663586865707/AnsiballZ_command.py' Jul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:25 managed-node2 python3.12[50842]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50843.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 166. Jul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:26 managed-node2 python3.12[51004]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None Jul 07 20:14:26 managed-node2 systemd[1]: Stopping user@3001.service - User Manager for UID 3001... ░░ Subject: A stop job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 2529. Jul 07 20:14:26 managed-node2 systemd[27434]: Activating special unit exit.target... Jul 07 20:14:26 managed-node2 systemd[27434]: Stopping podman-pause-8c2d7b35.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 177. 
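Disabling linger for podman_basic_user is what lets systemd shut down the user@3001.service manager seen stopping below; the repeated loginctl show-user calls further down poll until that user is fully gone. A minimal manual sketch of the same sequence, assuming the user name from the log:

    loginctl disable-linger podman_basic_user
    # repeat until the user manager has exited; once the user is completely gone
    # the query fails (or stops reporting an active state)
    loginctl show-user --value -p State podman_basic_user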
Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app-podman\x2dkube.slice - Slice /app/podman-kube. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 181 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: app-podman\x2dkube.slice: Consumed 589ms CPU time, 74.4M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 178 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target default.target - Main User Target. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 191 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target basic.target - Basic System. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 190 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target paths.target - Paths. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 184 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target sockets.target - Sockets. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 187 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped target timers.target - Timers. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 189 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 186 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 185 and the job result is done. Jul 07 20:14:26 managed-node2 dbus-broker[27991]: Dispatched 2275 messages @ 3(±13)μs / message. 
░░ Subject: Dispatched 2275 messages ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ This message is printed by dbus-broker when shutting down. It includes metric ░░ information collected during the runtime of dbus-broker. ░░ ░░ The message lists the number of dispatched messages ░░ (in this case 2275) as well as the mean time to ░░ handling a single message. The time measurements exclude the time spent on ░░ writing to and reading from the kernel. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopping dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 174. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 179 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped podman-pause-8c2d7b35.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 177 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user.slice - Slice /user. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 176 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Stopped dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 174 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice session.slice - User Core Session Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 175 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Closed dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 180 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app.slice - User Application Slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 182 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[27434]: app.slice: Consumed 615ms CPU time, 74.5M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[27434]: Reached target shutdown.target - Shutdown. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 173. Jul 07 20:14:26 managed-node2 systemd[27434]: Finished systemd-exit.service - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 171. Jul 07 20:14:26 managed-node2 systemd[27434]: Reached target exit.target - Exit the Session. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 170. Jul 07 20:14:26 managed-node2 systemd-logind[659]: Removed session 6. ░░ Subject: Session 6 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 6 has been terminated. Jul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@3001.service has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: Stopped user@3001.service - User Manager for UID 3001. ░░ Subject: A stop job for unit user@3001.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user@3001.service has finished. ░░ ░░ The job identifier is 2529 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Consumed 2.005s CPU time, 92.7M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user@3001.service completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 systemd[1]: Stopping user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A stop job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 2528. Jul 07 20:14:26 managed-node2 systemd[1]: run-user-3001.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-user-3001.mount has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: user-runtime-dir@3001.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-runtime-dir@3001.service has successfully entered the 'dead' state. Jul 07 20:14:26 managed-node2 systemd[1]: Stopped user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A stop job for unit user-runtime-dir@3001.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-runtime-dir@3001.service has finished. ░░ ░░ The job identifier is 2528 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: Removed slice user-3001.slice - User Slice of UID 3001. 
░░ Subject: A stop job for unit user-3001.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit user-3001.slice has finished. ░░ ░░ The job identifier is 2530 and the job result is done. Jul 07 20:14:26 managed-node2 systemd[1]: user-3001.slice: Consumed 2.031s CPU time, 92.8M memory peak. ░░ Subject: Resources consumed by unit runtime ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit user-3001.slice completed and consumed the indicated resources. Jul 07 20:14:26 managed-node2 python3.12[51165]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:29 managed-node2 python3.12[51321]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jul 07 20:14:31 managed-node2 python3.12[51478]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:33 managed-node2 python3.12[51634]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:36 managed-node2 python3.12[51790]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:38 managed-node2 python3.12[51946]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:39 managed-node2 sudo[52152]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfrixhsqpnpoemptjbnyouggwynzakes ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933679.2995527-18658-251091507513885/AnsiballZ_command.py' Jul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:39 managed-node2 python3.12[52155]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session 
closed for user podman_basic_user Jul 07 20:14:40 managed-node2 python3.12[52317]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:40 managed-node2 python3.12[52479]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd3 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:14:40 managed-node2 sudo[52692]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tblibtrtarfdedjsuypqezvumnboznzr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933680.662618-18720-8327876079082/AnsiballZ_command.py' Jul 07 20:14:40 managed-node2 sudo[52692]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jul 07 20:14:40 managed-node2 python3.12[52695]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:41 managed-node2 sudo[52692]: pam_unix(sudo:session): session closed for user podman_basic_user Jul 07 20:14:41 managed-node2 python3.12[52853]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:41 managed-node2 python3.12[53011]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active ' _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:42 managed-node2 python3.12[53169]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:44 managed-node2 python3.12[53479]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:45 managed-node2 python3.12[53640]: ansible-getent 
Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:45 managed-node2 python3.12[53796]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:48 managed-node2 python3.12[53953]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jul 07 20:14:48 managed-node2 python3.12[54109]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:49 managed-node2 python3.12[54266]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:49 managed-node2 python3.12[54422]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:50 managed-node2 python3.12[54578]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:51 managed-node2 python3.12[54734]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:51 managed-node2 python3.12[54889]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:52 managed-node2 python3.12[55044]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:53 managed-node2 python3.12[55199]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:53 managed-node2 python3.12[55355]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:55 managed-node2 python3.12[55512]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:56 managed-node2 python3.12[55668]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:14:56 managed-node2 python3.12[55825]: ansible-stat Invoked with 
path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:56 managed-node2 python3.12[55980]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:58 managed-node2 python3.12[56135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:59 managed-node2 python3.12[56292]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:00 managed-node2 python3.12[56448]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:15:00 managed-node2 python3.12[56605]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:01 managed-node2 python3.12[56760]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:02 managed-node2 python3.12[56915]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None Jul 07 20:15:02 managed-node2 python3.12[57071]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:03 managed-node2 python3.12[57226]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:04 managed-node2 python3.12[57381]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:06 managed-node2 python3.12[57587]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:15:07 managed-node2 python3.12[57771]: ansible-stat Invoked with 
path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:07 managed-node2 python3.12[57926]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:09 managed-node2 python3.12[58236]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:10 managed-node2 python3.12[58398]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:15:10 managed-node2 python3.12[58554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:12 managed-node2 python3.12[58711]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:13 managed-node2 python3.12[58868]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:14 managed-node2 python3.12[59023]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:14 managed-node2 python3.12[59148]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.0353012-19840-138139497644559/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:15 managed-node2 python3.12[59303]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:15 managed-node2 systemd[1]: Reload requested from client PID 59304 ('systemctl') (unit session-5.scope)... Jul 07 20:15:15 managed-node2 systemd[1]: Reloading... Jul 07 20:15:15 managed-node2 systemd-rc-local-generator[59347]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:15:15 managed-node2 systemd[1]: Reloading finished in 191 ms. Jul 07 20:15:16 managed-node2 python3.12[59512]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:16 managed-node2 systemd[1]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has begun execution. ░░ ░░ The job identifier is 2532. 
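Note: the systemd-escape calls in the cleanup entries above are how the role maps a kube YAML path to its podman-kube@ unit instance name. A minimal sketch of that mapping, reusing the httpd2 path from this log; the resulting name matches the podman-kube@… unit the systemd module stops in the entries above:

    systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml
    # -> podman-kube@-etc-containers-ansible\x2dkubernetes.d-httpd2.yml.service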
Jul 07 20:15:16 managed-node2 systemd[1]: Created slice machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice - cgroup machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice. ░░ Subject: A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully. ░░ ░░ The job identifier is 2616. Jul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.265732025 -0400 EDT m=+0.061820196 container create 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.272319983 -0400 EDT m=+0.068408122 pod create fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod) Jul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59516]: fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 Jul 07 20:15:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3361] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9) Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:15:16 managed-node2 kernel: veth0: entered allmulticast mode Jul 07 20:15:16 managed-node2 kernel: veth0: entered promiscuous mode Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3447] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10) Jul 07 20:15:16 managed-node2 (udev-worker)[59536]: Network interface NamePolicy= disabled on kernel command line. Jul 07 20:15:16 managed-node2 (udev-worker)[59537]: Network interface NamePolicy= disabled on kernel command line. 
Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state Jul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered forwarding state Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3546] device (veth0): carrier: link connected Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3548] device (podman0): carrier: link connected Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3683] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3701] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3707] device (podman0): Activation: starting connection 'podman0' (845713e4-a167-41fd-b540-293fb4a7aacd) Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3708] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3710] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3712] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3714] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2622. Jul 07 20:15:16 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2622. Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4101] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4105] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4116] device (podman0): Activation: successful, device activated. Jul 07 20:15:16 managed-node2 systemd[1]: Started libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope - libcrun container. ░░ Subject: A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully. ░░ ░░ The job identifier is 2701. 
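Note: the NetworkManager and kernel messages above show the rootful podman bridge coming up for the pod's infra container. To confirm the bridge from the host one could run something like the lines below; the interface name podman0 is taken from the log, while the network name 'podman' is podman's default and an assumption here:

    ip link show podman0     # the bridge NetworkManager reports as externally managed
    podman network ls        # should list the default 'podman' network backing it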
Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.467418922 -0400 EDT m=+0.168638968 container init 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.469862017 -0400 EDT m=+0.171082040 container start 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service) Jul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.47596405 -0400 EDT m=+0.177184035 pod start fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod) Jul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59525]: quadlet-pod Jul 07 20:15:16 managed-node2 systemd[1]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-pod-pod.service has finished successfully. ░░ ░░ The job identifier is 2532. Jul 07 20:15:17 managed-node2 python3.12[59737]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:18 managed-node2 podman[59923]: 2025-07-07 20:15:18.970989919 -0400 EDT m=+0.333232348 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:15:19 managed-node2 python3.12[60094]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
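Note: the root-scope pod above is driven entirely by the quadlet-pod-pod.pod file copied into /etc/containers/systemd. The deployed file contents are not shown in the log, so the [Pod] section below is an assumed minimal sketch; only the path, the pod name quadlet-pod, and the generated service name quadlet-pod-pod-pod.service are taken from the journal entries above:

    # sketch of /etc/containers/systemd/quadlet-pod-pod.pod (assumed contents)
    cat > /etc/containers/systemd/quadlet-pod-pod.pod <<'EOF'
    [Pod]
    PodName=quadlet-pod
    EOF
    systemctl daemon-reload                        # quadlet generates quadlet-pod-pod-pod.service
    systemctl start quadlet-pod-pod-pod.service    # creates the quadlet-pod pod and its infra container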
Jul 07 20:15:19 managed-node2 python3.12[60249]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:20 managed-node2 python3.12[60374]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933719.570603-19951-98699047571118/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:20 managed-node2 python3.12[60529]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:20 managed-node2 systemd[1]: Reload requested from client PID 60530 ('systemctl') (unit session-5.scope)... Jul 07 20:15:20 managed-node2 systemd[1]: Reloading... Jul 07 20:15:20 managed-node2 systemd-rc-local-generator[60574]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:15:20 managed-node2 systemd[1]: Reloading finished in 199 ms. Jul 07 20:15:21 managed-node2 python3.12[60739]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:21 managed-node2 systemd[1]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit quadlet-pod-container.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has begun execution. ░░ ░░ The job identifier is 2708. Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.598559882 -0400 EDT m=+0.046043355 container create 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service) Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.641059859 -0400 EDT m=+0.088543485 container init 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z) Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.643379876 -0400 EDT m=+0.090863506 container start 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:15:21 managed-node2 systemd[1]: Started quadlet-pod-container.service. 
░░ Subject: A start job for unit quadlet-pod-container.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-pod-container.service has finished successfully. ░░ ░░ The job identifier is 2708. Jul 07 20:15:21 managed-node2 quadlet-pod-container[60743]: 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d Jul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.576103282 -0400 EDT m=+0.023586939 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jul 07 20:15:22 managed-node2 python3.12[60912]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:23 managed-node2 python3.12[61068]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:24 managed-node2 python3.12[61224]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:25 managed-node2 python3.12[61388]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:15:25 managed-node2 useradd[61390]: new group: name=user_quadlet_pod, GID=2223 Jul 07 20:15:25 managed-node2 useradd[61390]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0 Jul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ] Jul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ] Jul 07 20:15:26 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
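Note: the container unit deployed just above joins that pod rather than running standalone. As with the pod file, the deployed contents are not logged, so the sketch below is an assumption; the path, the image quay.io/libpod/testimage:20210610, the container name quadlet-pod-container, and the inspect command used for verification are taken from the log:

    # sketch of /etc/containers/systemd/quadlet-pod-container.container (assumed contents)
    cat > /etc/containers/systemd/quadlet-pod-container.container <<'EOF'
    [Container]
    Image=quay.io/libpod/testimage:20210610
    ContainerName=quadlet-pod-container
    Pod=quadlet-pod-pod.pod
    EOF
    systemctl daemon-reload
    systemctl start quadlet-pod-container.service
    podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}'
    # expected to print the infra container and quadlet-pod-container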
Jul 07 20:15:27 managed-node2 python3.12[61703]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:28 managed-node2 python3.12[61866]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None Jul 07 20:15:28 managed-node2 python3.12[62022]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:29 managed-node2 python3.12[62180]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:29 managed-node2 python3.12[62336]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:31 managed-node2 python3.12[62492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:32 managed-node2 python3.12[62649]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:32 managed-node2 python3.12[62805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:33 managed-node2 python3.12[62961]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:15:33 managed-node2 systemd[1]: Created slice user-2223.slice - User Slice of UID 2223. ░░ Subject: A start job for unit user-2223.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-2223.slice has finished successfully. ░░ ░░ The job identifier is 2871. Jul 07 20:15:33 managed-node2 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223... ░░ Subject: A start job for unit user-runtime-dir@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has begun execution. ░░ ░░ The job identifier is 2793. Jul 07 20:15:34 managed-node2 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223. ░░ Subject: A start job for unit user-runtime-dir@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@2223.service has finished successfully. ░░ ░░ The job identifier is 2793. 
Jul 07 20:15:34 managed-node2 systemd[1]: Starting user@2223.service - User Manager for UID 2223... ░░ Subject: A start job for unit user@2223.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has begun execution. ░░ ░░ The job identifier is 2873. Jul 07 20:15:34 managed-node2 systemd-logind[659]: New session 7 of user user_quadlet_pod. ░░ Subject: A new session 7 has been created for user user_quadlet_pod ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 7 has been created for the user user_quadlet_pod. ░░ ░░ The leading process of the session is 62973. Jul 07 20:15:34 managed-node2 (systemd)[62973]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0) Jul 07 20:15:34 managed-node2 systemd[62973]: Queued start job for default target default.target. Jul 07 20:15:34 managed-node2 systemd[62973]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jul 07 20:15:34 managed-node2 systemd[62973]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jul 07 20:15:34 managed-node2 systemd[62973]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jul 07 20:15:34 managed-node2 systemd[62973]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jul 07 20:15:34 managed-node2 systemd[62973]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jul 07 20:15:34 managed-node2 systemd[62973]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jul 07 20:15:34 managed-node2 systemd[62973]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jul 07 20:15:34 managed-node2 systemd[62973]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jul 07 20:15:34 managed-node2 systemd[62973]: Startup finished in 74ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 2223 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 74648 microseconds. Jul 07 20:15:34 managed-node2 systemd[1]: Started user@2223.service - User Manager for UID 2223. ░░ Subject: A start job for unit user@2223.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@2223.service has finished successfully. ░░ ░░ The job identifier is 2873. 
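Note: nothing in the entries above logs in as user_quadlet_pod interactively; the user manager (user@2223.service) starts because lingering is enabled for the account. A sketch of the equivalent manual steps, using the account, UID and runtime directory recorded in the log; the sudo/XDG_RUNTIME_DIR pattern mirrors the become commands the role issues in the entries that follow:

    loginctl enable-linger user_quadlet_pod          # creates /var/lib/systemd/linger/user_quadlet_pod
    ls /var/lib/systemd/linger/                      # the role stats this path to confirm lingering
    # talk to the user's systemd instance the same way the role does:
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user daemon-reload
    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user start quadlet-pod-pod-pod.service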
Jul 07 20:15:34 managed-node2 python3.12[63135]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:34 managed-node2 python3.12[63290]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:35 managed-node2 python3.12[63415]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933734.666246-20447-234683972003494/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:35 managed-node2 sudo[63620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwdxpsyaqgiwosrsmcnobsvzckxuxltr ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933735.4058456-20475-145399815936558/AnsiballZ_systemd.py' Jul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually Jul 07 20:15:35 managed-node2 systemd[62973]: Reload requested from client PID 63624 ('systemctl')... Jul 07 20:15:35 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:35 managed-node2 systemd[62973]: Reloading finished in 42 ms. Jul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:36 managed-node2 sudo[63839]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofgmoaezuncvsjzylzfbwkllzxyiawqa ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933736.0668013-20497-124358191920012/AnsiballZ_systemd.py' Jul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:36 managed-node2 python3.12[63842]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:36 managed-node2 systemd[62973]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user.... 
░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 25. Jul 07 20:15:36 managed-node2 sh[63846]: active Jul 07 20:15:36 managed-node2 systemd[62973]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user.. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 25. Jul 07 20:15:36 managed-node2 systemd[62973]: Starting quadlet-pod-pod-pod.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice session.slice - User Core Session Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jul 07 20:15:36 managed-node2 systemd[62973]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 26. Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jul 07 20:15:36 managed-node2 systemd[62973]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 26. Jul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Ready Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 33. Jul 07 20:15:36 managed-node2 systemd[62973]: Created slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 32. Jul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63855]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de Jul 07 20:15:36 managed-node2 systemd[62973]: Started podman-pause-569872be.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 36. 
Jul 07 20:15:36 managed-node2 systemd[62973]: Started libpod-31f1b98498c312272d594cc143121f0c4e208b416f5b06370302a3ade84678f0.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 40. Jul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63874]: quadlet-pod Jul 07 20:15:36 managed-node2 systemd[62973]: Started quadlet-pod-pod-pod.service. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:37 managed-node2 python3.12[64055]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:38 managed-node2 python3.12[64212]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:38 managed-node2 python3.12[64368]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:39 managed-node2 python3.12[64524]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jul 07 20:15:40 managed-node2 sudo[64729]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opzncrxhlgpablgicvdgsyjydbaatunc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933740.2555504-20657-253835226843587/AnsiballZ_podman_image.py' Jul 07 20:15:40 managed-node2 sudo[64729]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64733.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 45. Jul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64740.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 49. Jul 07 20:15:41 managed-node2 systemd[62973]: Started podman-64765.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 53. 
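Note: the podman-64733/64740/64765 scopes above correspond to the role's podman_image step pulling the test image inside the user's session before the rootless container unit starts. Done by hand that step would look roughly like the line below; the image reference is taken from the log, while the exact command form is an assumption (the role uses its podman_image module rather than the CLI):

    sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 podman pull quay.io/libpod/testimage:20210610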
Jul 07 20:15:41 managed-node2 sudo[64729]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:41 managed-node2 python3.12[64927]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:42 managed-node2 python3.12[65082]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:42 managed-node2 python3.12[65207]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933742.1096382-20721-81203128614920/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:43 managed-node2 sudo[65412]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dradgtbmatpvlibzybrbrgncbuvsbmla ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933742.8958044-20756-33448022762234/AnsiballZ_systemd.py' Jul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:43 managed-node2 python3.12[65415]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jul 07 20:15:43 managed-node2 systemd[62973]: Reload requested from client PID 65416 ('systemctl')... Jul 07 20:15:43 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:43 managed-node2 systemd[62973]: Reloading finished in 50 ms. Jul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:43 managed-node2 sudo[65630]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypfsehmisorrgovabzrcscgtxcipauhh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933743.5809875-20783-151961798381358/AnsiballZ_systemd.py' Jul 07 20:15:43 managed-node2 sudo[65630]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:44 managed-node2 python3.12[65633]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jul 07 20:15:44 managed-node2 systemd[62973]: Starting quadlet-pod-container.service... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 57. Jul 07 20:15:44 managed-node2 systemd[62973]: Started quadlet-pod-container.service. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 57. Jul 07 20:15:44 managed-node2 quadlet-pod-container[65636]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748 Jul 07 20:15:44 managed-node2 sudo[65630]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:44 managed-node2 python3.12[65805]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 python3.12[65961]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 sudo[66167]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkgmctvpahwcnyvziosokhuvkvmstaqp ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933745.1806111-20829-116188117400154/AnsiballZ_command.py' Jul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:45 managed-node2 python3.12[66170]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:45 managed-node2 systemd[62973]: Started podman-66171.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 71. 
Jul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:15:46 managed-node2 python3.12[66333]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:47 managed-node2 python3.12[66645]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:48 managed-node2 python3.12[66806]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:49 managed-node2 python3.12[66963]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:49 managed-node2 python3.12[67119]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:51 managed-node2 python3.12[67275]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:51 managed-node2 python3.12[67432]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:52 managed-node2 python3.12[67588]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:52 managed-node2 python3.12[67744]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:53 managed-node2 sudo[67951]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qbrxmfhyqxuzmhgzlevjndrxencddmhb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933753.0681589-21169-130280143861349/AnsiballZ_systemd.py' Jul 07 20:15:53 managed-node2 sudo[67951]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:15:53 managed-node2 python3.12[67954]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:15:53 managed-node2 systemd[62973]: Reload requested from client PID 67957 ('systemctl')... Jul 07 20:15:53 managed-node2 systemd[62973]: Reloading... Jul 07 20:15:53 managed-node2 systemd[62973]: Reloading finished in 49 ms. Jul 07 20:15:53 managed-node2 systemd[62973]: Stopping quadlet-pod-container.service... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 75. 
Jul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: time="2025-07-07T20:16:03-04:00" level=warning msg="StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL" Jul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748 Jul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit UNIT has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 137. Jul 07 20:16:03 managed-node2 systemd[62973]: Removed slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 76 and the job result is done. Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68000]: quadlet-pod Jul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit UNIT has entered the 'failed' state with result 'exit-code'. Jul 07 20:16:03 managed-node2 systemd[62973]: Stopped quadlet-pod-container.service. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 75 and the job result is done. 
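Note: the stop sequence above is podman's default behaviour when a container ignores SIGTERM: after the 10-second stop timeout it escalates to SIGKILL, and systemd then reports the main process as exited with status 137, the usual 128 + signal-number encoding for SIGKILL (signal 9):

    echo $((128 + 9))    # 137, the status shown for quadlet-pod-container.service above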
Jul 07 20:16:03 managed-node2 sudo[67951]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory Jul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68018]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de Jul 07 20:16:04 managed-node2 python3.12[68183]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:05 managed-node2 python3.12[68495]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet systemctl list-unit-files | grep quadlet ls -alrtF /etc/containers/systemd /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:06 managed-node2 python3.12[68705]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:06 managed-node2 python3.12[68861]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:08 managed-node2 python3.12[69172]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:09 managed-node2 python3.12[69333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:09 managed-node2 python3.12[69490]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:10 managed-node2 python3.12[69646]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:11 managed-node2 python3.12[69802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:12 managed-node2 python3.12[69959]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:12 
managed-node2 python3.12[70115]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:16:13 managed-node2 python3.12[70271]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:13 managed-node2 sudo[70478]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zgbhusmnvfxgnyqpigdcfxpfmxddoleb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933773.6131501-21423-280948783795652/AnsiballZ_systemd.py' Jul 07 20:16:13 managed-node2 sudo[70478]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0) Jul 07 20:16:14 managed-node2 python3.12[70482]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:16:14 managed-node2 systemd[62973]: Reload requested from client PID 70485 ('systemctl')... Jul 07 20:16:14 managed-node2 systemd[62973]: Reloading... Jul 07 20:16:14 managed-node2 systemd[62973]: Reloading finished in 47 ms. Jul 07 20:16:14 managed-node2 sudo[70478]: pam_unix(sudo:session): session closed for user user_quadlet_pod Jul 07 20:16:14 managed-node2 python3.12[70650]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:16:15 managed-node2 python3.12[70963]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node2 : ok=207 changed=14 unreachable=0 failed=2 skipped=237 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:16:04.869690+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-08T00:16:04.851608+00:00Z", "task_name": "Parse quadlet file", "task_path": "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12" }, { "ansible_version": "2.17.12", "delta": "0:00:00.032124", "end_time": "2025-07-07 20:16:06.564883", "host": "managed-node2", "message": "", "rc": 0, "start_time": "2025-07-07 20:16:06.532759", "stdout": "Jul 07 20:11:58 managed-node2 aardvark-dns[28871]: Received SIGHUP\nJul 07 20:11:58 managed-node2 aardvark-dns[28871]: Successfully parsed config\nJul 07 20:11:58 managed-node2 aardvark-dns[28871]: Listen v4 ip {}\nJul 07 20:11:58 managed-node2 aardvark-dns[28871]: Listen v6 ip {}\nJul 07 20:11:58 managed-node2 aardvark-dns[28871]: No configuration found stopping the sever\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:11:58 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:11:58 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered 
disabled state\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Initializing event 
backend file\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=29568\nJul 07 20:11:58 managed-node2 systemd[27434]: Stopped libpod-conmon-82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d.scope.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 85 and the job result is done.\nJul 07 20:11:58 managed-node2 systemd[27434]: Removed slice user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice - cgroup user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 84 and the job result is done.\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 podman[29546]: Pods stopped:\nJul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824\nJul 07 20:11:58 managed-node2 podman[29546]: Pods removed:\nJul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824\nJul 07 20:11:58 managed-node2 podman[29546]: Secrets removed:\nJul 07 20:11:58 managed-node2 podman[29546]: Volumes removed:\nJul 07 20:11:58 managed-node2 systemd[27434]: Created slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 86.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-b68ee141dcf27814664a590d406043e669f2802be350ecb975174d3342b509fb.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 90.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started rootless-netns-e8ce431d.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 94.\nJul 07 20:11:58 
managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:11:58 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:11:58 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:11:58 managed-node2 systemd[27434]: Started run-p29607-i29907.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 98.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-7f0ec0a3585717aa5dc0675fd2d3f2fb67af120ad764f3608d3ed615db7ae81d.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 102.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 107.\nJul 07 20:11:58 managed-node2 podman[29546]: Pod:\nJul 07 20:11:58 managed-node2 podman[29546]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:11:58 managed-node2 podman[29546]: Container:\nJul 07 20:11:58 managed-node2 podman[29546]: d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651\nJul 07 20:11:58 managed-node2 systemd[27434]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 71.\nJul 07 20:11:58 managed-node2 sudo[29540]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:11:59 managed-node2 python3.12[29790]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:11:59 managed-node2 python3.12[29946]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:01 managed-node2 python3.12[30103]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:02 
managed-node2 python3.12[30259]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:02 managed-node2 python3.12[30414]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:03 managed-node2 podman[30600]: 2025-07-07 20:12:03.454853091 -0400 EDT m=+0.369862355 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:03 managed-node2 python3.12[30769]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:04 managed-node2 python3.12[30924]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:04 managed-node2 python3.12[31079]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:12:05 managed-node2 python3.12[31204]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933524.4345405-13159-278912456486895/.source.yml _original_basename=.wig720_j follow=False checksum=b4126723a3845d354fb7beda3b3f44919cb02dd7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None 
remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.511414005 -0400 EDT m=+0.019368086 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1968.\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.55953636 -0400 EDT m=+0.067490307 container create a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.565742112 -0400 EDT m=+0.073696026 pod create 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.592586777 -0400 EDT m=+0.100540709 container create f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.567663828 -0400 EDT m=+0.075617914 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:05 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:12:05 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6188] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6207] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 07 20:12:05 managed-node2 (udev-worker)[31379]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:05 managed-node2 
kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:12:05 managed-node2 (udev-worker)[31378]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6283] device (veth0): carrier: link connected\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6285] device (podman1): carrier: link connected\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6369] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6382] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6387] device (podman1): Activation: starting connection 'podman1' (1839a03b-e916-4e56-ad20-699cf8b9a55a)\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6389] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6402] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6406] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6413] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1974.\nJul 07 20:12:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1974.\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7043] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7046] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7051] device (podman1): Activation: successful, device activated.\nJul 07 20:12:05 managed-node2 systemd[1]: Started run-p31412-i31712.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p31412-i31712.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
run-p31412-i31712.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2053.\nJul 07 20:12:05 managed-node2 aardvark-dns[31412]: starting aardvark on a child with pid 31419\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Successfully parsed config\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v4 ip {\"podman-default-kube-network\": [10.89.0.1]}\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v6 ip {}\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53]\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2059.\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : terminal_ctrl_fd: 12\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : winsz read side: 16, winsz write side: 17\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2066.\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : container PID: 31426\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.796580679 -0400 EDT m=+0.304534648 container init a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.799350929 -0400 EDT m=+0.307304949 container start a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2073.\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : addr{sun_family=AF_UNIX, 
sun_path=/proc/self/fd/11/attach}\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : terminal_ctrl_fd: 11\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : winsz read side: 15, winsz write side: 16\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2080.\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container PID: 31431\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.854777496 -0400 EDT m=+0.362731459 container init f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.857558887 -0400 EDT m=+0.365512915 container start f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.863542588 -0400 EDT m=+0.371496538 pod start 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\n Container:\n f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\n \nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Initializing event backend journald\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pod using bridge network 
mode\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"setting container name 2dbe050d31da-infra\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Allocated lock 1 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\" has run directory \\\"/run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: 
missing)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob 
\\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Allocated lock 2 for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" has run directory \\\"/run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Strongconnecting node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pushed a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 onto stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Finishing node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307. Popped a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 off stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Strongconnecting node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pushed f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b onto stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Finishing node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b. 
Popped f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b off stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-1ce7c5df-883b-4dd4-e4c5-5e8b3ad8b1f6 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created root filesystem for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\"\n [DEBUG netavark::network::validation] Validating network namespace...\n [DEBUG netavark::commands::setup] Setting up...\n [INFO netavark::firewall] Using nftables firewall driver\n [DEBUG netavark::network::bridge] Setup network podman-default-kube-network\n [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24]\n [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24]\n [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2\n [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100)\n [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24\n [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source\n [INFO netavark::firewall::nft] Creating container chain nv_51bfc940_10_89_0_0_nm24\n [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1\n [DEBUG netavark::dns::aardvark] Spawning aardvark server\n [DEBUG netavark::dns::aardvark] start aardvark-dns: [\"systemd-run\", \"-q\", \"--scope\", \"/usr/libexec/podman/aardvark-dns\", \"--config\", \"/run/containers/networks/aardvark-dns\", \"-p\", \"53\", \"run\"]\n [DEBUG netavark::commands::setup] {\n \"podman-default-kube-network\": StatusBlock {\n dns_search_domains: Some(\n [\n \"dns.podman\",\n ],\n ),\n dns_server_ips: Some(\n [\n 10.89.0.1,\n ],\n ),\n interfaces: Some(\n {\n \"eth0\": NetInterface {\n mac_address: \"ce:aa:8c:aa:62:92\",\n subnets: Some(\n [\n NetAddress {\n gateway: Some(\n 10.89.0.1,\n ),\n ipnet: 10.89.0.2/24,\n },\n ],\n ),\n },\n },\n ),\n },\n }\n [DEBUG netavark::commands::setup] Setup complete\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Setting Cgroups for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path 
\\\"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created OCI spec for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/config.json\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -u a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata -p /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/pidfile -n 2dbe050d31da-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307]\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Received: 31426\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Got Conmon PID as 31424\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 in OCI runtime\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Starting container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 with command [/catatonit -P]\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Started container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/66KUKQ3YMIWXUMPTPGDU24SJUU,upperdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/diff,workdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c198,c290\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Mounted container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" at \\\"/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created root filesystem for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Setting Cgroups for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created OCI spec for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/config.json\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-07T20:12:05-04:00\" 
level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -u f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata -p /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b]\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Received: 31431\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Got Conmon PID as 31429\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b in OCI runtime\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Starting container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Started container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:12:06 managed-node2 python3.12[31587]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:12:06 managed-node2 systemd[1]: Reload requested from client PID 31588 
('systemctl') (unit session-5.scope)...\nJul 07 20:12:06 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:06 managed-node2 systemd-rc-local-generator[31626]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:06 managed-node2 systemd[1]: Reloading finished in 201 ms.\nJul 07 20:12:07 managed-node2 python3.12[31800]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nJul 07 20:12:07 managed-node2 systemd[1]: Reload requested from client PID 31803 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:07 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:07 managed-node2 systemd-rc-local-generator[31851]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:07 managed-node2 systemd[1]: Reloading finished in 212 ms.\nJul 07 20:12:08 managed-node2 python3.12[32015]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:12:08 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice - Slice /system/podman-kube.\n\u2591\u2591 Subject: A start job for unit system-podman\\x2dkube.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit system-podman\\x2dkube.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2088.\nJul 07 20:12:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2087.\nJul 07 20:12:08 managed-node2 podman[32019]: 2025-07-07 20:12:08.118637911 -0400 EDT m=+0.022713444 pod stop 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: time=\"2025-07-07T20:12:18-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container 
31431 exited with status 137\nJul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope/container/memory.events\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.143617522 -0400 EDT m=+10.047693144 container died f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 07 20:12:18 
managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Initializing event backend journald\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.191389905 -0400 EDT m=+10.095465414 container cleanup f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=32031\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope/container/memory.events\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.211877781 -0400 EDT m=+10.115953439 container died a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage 
--runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)\"\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Received SIGHUP\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Successfully parsed config\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v4 ip {}\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v6 ip {}\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: No configuration found stopping the sever\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: 
time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Initializing event backend journald\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:12:18 managed-node2 systemd[1]: run-p31412-i31712.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p31412-i31712.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.2554] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:12:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job 
for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2172.\nJul 07 20:12:18 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2172.\nJul 07 20:12:18 managed-node2 systemd[1]: run-netns-netns\\x2d1ce7c5df\\x2d883b\\x2d4dd4\\x2de4c5\\x2d5e8b3ad8b1f6.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d1ce7c5df\\x2d883b\\x2d4dd4\\x2de4c5\\x2d5e8b3ad8b1f6.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.329738431 -0400 EDT m=+10.233814039 container cleanup a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=32043\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: Stopped 
libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2252 and the job result is done.\nJul 07 20:12:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2251 and the job result is done.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.395138747 -0400 EDT m=+10.299214275 container remove f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.421655838 -0400 EDT m=+10.325731365 container remove a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:18 managed-node2 systemd[1]: machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: No such file or directory\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430187452 -0400 EDT m=+10.334262942 pod remove 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: Pods stopped:\nJul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\nJul 07 20:12:18 managed-node2 podman[32019]: Pods removed:\nJul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\nJul 07 20:12:18 managed-node2 podman[32019]: Secrets removed:\nJul 07 20:12:18 managed-node2 podman[32019]: Volumes removed:\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430564473 -0400 EDT m=+10.334640043 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.451338654 -0400 EDT m=+10.355414196 container create 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 
managed-node2 systemd[1]: Created slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2253.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.492350688 -0400 EDT m=+10.396426207 container create 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.498774654 -0400 EDT m=+10.402850152 pod create 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.52421506 -0400 EDT m=+10.428290660 container create 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.524550628 -0400 EDT m=+10.428626163 container restart 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 systemd[1]: Started libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2259.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.500507692 -0400 EDT m=+10.404583357 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.584365043 -0400 EDT m=+10.488440589 container init 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.587275237 -0400 EDT m=+10.491350731 container start 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:12:18 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:12:18 managed-node2 (udev-worker)[32052]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:18 managed-node2 (udev-worker)[32051]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6102] device (podman1): carrier: link connected\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6105] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6153] device (veth0): carrier: link connected\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6156] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6318] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6330] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6372] device (podman1): Activation: starting connection 'podman1' (d4ed169d-27e3-42b9-8610-eac77be55153)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6374] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6376] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6377] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6380] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6494] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6511] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6517] device (podman1): Activation: successful, device activated.\nJul 07 20:12:18 managed-node2 systemd[1]: Started run-p32110-i32410.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p32110-i32410.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p32110-i32410.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2265.\nJul 07 20:12:18 managed-node2 
systemd[1]: Started libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2271.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.743416519 -0400 EDT m=+10.647492104 container init 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.745567921 -0400 EDT m=+10.649643561 container start 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 systemd[1]: Started libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2278.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.78342779 -0400 EDT m=+10.687503327 container init 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.786203333 -0400 EDT m=+10.690278905 container start 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.792190652 -0400 EDT m=+10.696266270 pod start 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: Pod:\nJul 07 20:12:18 managed-node2 podman[32019]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:12:18 managed-node2 podman[32019]: Container:\nJul 07 20:12:18 managed-node2 podman[32019]: 
0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd\nJul 07 20:12:18 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2087.\nJul 07 20:12:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:19 managed-node2 python3.12[32279]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:20 managed-node2 python3.12[32436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:21 managed-node2 python3.12[32592]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:22 managed-node2 python3.12[32747]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:23 managed-node2 podman[32933]: 2025-07-07 20:12:23.116451069 -0400 EDT m=+0.400655980 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:23 managed-node2 python3.12[33102]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:23 managed-node2 python3.12[33257]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:12:24 managed-node2 python3.12[33412]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:12:24 managed-node2 python3.12[33537]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933544.127541-13777-126359990981212/.source.yml _original_basename=.mz8q_k1v follow=False checksum=c8e8f54a2e9107a24008cfb6f1d2d59b89d86a42 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:25 managed-node2 python3.12[33692]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.227950329 -0400 EDT m=+0.015681172 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2285.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.26795522 -0400 EDT m=+0.055686154 container create 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.274184949 -0400 EDT m=+0.061915786 pod create 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.300940456 -0400 EDT m=+0.088671322 container create bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:25 managed-node2 kernel: veth1: entered allmulticast mode\nJul 07 
20:12:25 managed-node2 kernel: veth1: entered promiscuous mode\nJul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3256] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.275812121 -0400 EDT m=+0.063543116 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:25 managed-node2 (udev-worker)[33710]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3293] device (veth1): carrier: link connected\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2291.\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2298.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.421199278 -0400 EDT m=+0.208930271 container init 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.423916963 -0400 EDT m=+0.211647873 container start 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2305.\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 
Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2312.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.47967795 -0400 EDT m=+0.267408864 container init bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.482134977 -0400 EDT m=+0.269865956 container start bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.48810857 -0400 EDT m=+0.275839437 pod start 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:26 managed-node2 python3.12[33903]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 33904 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:26 managed-node2 systemd-rc-local-generator[33949]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading finished in 216 ms.\nJul 07 20:12:26 managed-node2 python3.12[34116]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nJul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 34119 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:27 managed-node2 systemd-rc-local-generator[34169]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:27 managed-node2 systemd[1]: Reloading finished in 222 ms.\nJul 07 20:12:27 managed-node2 python3.12[34331]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:12:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2319.\nJul 07 20:12:27 managed-node2 podman[34335]: 2025-07-07 
20:12:27.799634553 -0400 EDT m=+0.024842125 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:28 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: time=\"2025-07-07T20:12:37-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.830033292 -0400 EDT m=+10.055241268 container died bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.871361891 -0400 EDT m=+10.096569436 container cleanup bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.891562873 -0400 EDT m=+10.116770720 container died 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 
(image=, name=3340ce26176a-infra)\nJul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:37 managed-node2 systemd[1]: run-netns-netns\\x2d9f683135\\x2dcb1d\\x2d6825\\x2d135b\\x2df344c71f6412.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d9f683135\\x2dcb1d\\x2d6825\\x2d135b\\x2df344c71f6412.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.960143035 -0400 EDT m=+10.185350606 container cleanup 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:37 managed-node2 systemd[1]: Stopping libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope...\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2405.\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: Stopped libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished\n\u2591\u2591 Defined-By: 
systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2405 and the job result is done.\nJul 07 20:12:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2404 and the job result is done.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.968733882 -0400 EDT m=+10.193941424 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.973070119 -0400 EDT m=+10.198277748 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.999456374 -0400 EDT m=+10.224663949 container remove bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.025619131 -0400 EDT m=+10.250826702 container remove 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:38 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034330595 -0400 EDT m=+10.259538132 pod remove 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: Pods stopped:\nJul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94\nJul 07 20:12:38 managed-node2 podman[34335]: Pods removed:\nJul 07 20:12:38 managed-node2 podman[34335]: 
3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94\nJul 07 20:12:38 managed-node2 podman[34335]: Secrets removed:\nJul 07 20:12:38 managed-node2 podman[34335]: Volumes removed:\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034440895 -0400 EDT m=+10.259648519 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.055160997 -0400 EDT m=+10.280368656 container create f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2406.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.089857565 -0400 EDT m=+10.315065108 container create afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.096374067 -0400 EDT m=+10.321581608 pod create f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.121708454 -0400 EDT m=+10.346916101 container create 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.122052483 -0400 EDT m=+10.347260058 container restart f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2412.\nJul 07 20:12:38 managed-node2 
podman[34335]: 2025-07-07 20:12:38.09796996 -0400 EDT m=+10.323177689 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.161037652 -0400 EDT m=+10.386245273 container init f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.163287704 -0400 EDT m=+10.388495346 container start f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:38 managed-node2 kernel: veth1: entered allmulticast mode\nJul 07 20:12:38 managed-node2 kernel: veth1: entered promiscuous mode\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state\nJul 07 20:12:38 managed-node2 (udev-worker)[34367]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1845] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8)\nJul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1884] device (veth1): carrier: link connected\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2418.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.256569895 -0400 EDT m=+10.481777617 container init afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.259020833 -0400 EDT m=+10.484228554 container start afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has 
finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2425.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.305507767 -0400 EDT m=+10.530715363 container init 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.307690208 -0400 EDT m=+10.532897848 container start 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.313601662 -0400 EDT m=+10.538809232 pod start f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: Pod:\nJul 07 20:12:38 managed-node2 podman[34335]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:12:38 managed-node2 podman[34335]: Container:\nJul 07 20:12:38 managed-node2 podman[34335]: 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e\nJul 07 20:12:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2319.\nJul 07 20:12:39 managed-node2 sudo[34620]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxlwmudvcyywtlojiblkxiobcxywcxdd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933558.7922473-14198-135957250003670/AnsiballZ_command.py'\nJul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:12:39 managed-node2 python3.12[34623]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:39 managed-node2 systemd[27434]: Started podman-34631.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 112.\nJul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session closed for user 
podman_basic_user\nJul 07 20:12:39 managed-node2 python3.12[34794]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 python3.12[34957]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 sudo[35170]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-edeafswnzssmkzeqkidxujvilmmhsinz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933560.1733143-14258-164223741543922/AnsiballZ_command.py'\nJul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:12:40 managed-node2 python3.12[35173]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:12:40 managed-node2 python3.12[35331]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:41 managed-node2 python3.12[35489]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:42 managed-node2 python3.12[35647]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:42 managed-node2 python3.12[35804]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] 
decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:43 managed-node2 python3.12[35960]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:43 managed-node2 python3.12[36116]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:43 managed-node2 python3.12[36272]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:46 managed-node2 python3.12[36583]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:47 managed-node2 python3.12[36744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:50 managed-node2 python3.12[36901]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:51 managed-node2 python3.12[37057]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nJul 07 20:12:51 managed-node2 python3.12[37214]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:12:52 managed-node2 python3.12[37371]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:12:54 managed-node2 python3.12[37526]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present 
allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:55 managed-node2 python3.12[37683]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:55 managed-node2 python3.12[37839]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:56 managed-node2 python3.12[37996]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 07 20:12:58 managed-node2 python3.12[38193]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 07 20:12:59 managed-node2 python3.12[38348]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 07 20:13:03 managed-node2 python3.12[38503]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:13:03 managed-node2 python3.12[38659]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:04 managed-node2 python3.12[38817]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:04 managed-node2 python3.12[38973]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:05 managed-node2 python3.12[39129]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True 
strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:06 managed-node2 python3.12[39285]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:13:07 managed-node2 python3.12[39440]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:07 managed-node2 python3.12[39595]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:07 managed-node2 sudo[39800]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sggfuwotsbnqelqoxqdpnxcztrgpfxov ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933587.546744-15228-221616196309386/AnsiballZ_podman_image.py'\nJul 07 20:13:07 managed-node2 sudo[39800]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:07 managed-node2 systemd[27434]: Started podman-39804.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 116.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39811.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 120.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39818.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 124.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39825.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 128.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39832.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 132.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39839.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 136.\nJul 07 20:13:08 managed-node2 sudo[39800]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:08 managed-node2 python3.12[40000]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:09 managed-node2 python3.12[40157]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:09 managed-node2 python3.12[40312]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:10 managed-node2 python3.12[40390]: ansible-ansible.legacy.file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=.g46gggh2 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:10 managed-node2 sudo[40595]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwtednmiooezcolvhoaprqmyaqljtcnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933590.280253-15331-51302432010459/AnsiballZ_podman_play.py'\nJul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:10 managed-node2 systemd[27434]: Started podman-40605.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 140.\nJul 07 20:13:10 managed-node2 systemd[27434]: Created slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 144.\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug 
msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 8ffa2dc6ff76aec6ab19c0699b0508615d79e3b6d14dd7cd78be0f62c7718f3e bridge podman1 2025-07-07 20:11:45.408967969 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Created cgroup path user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice for parent user.slice and name libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Created cgroup user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Shutting down engines\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=40605\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:11 managed-node2 python3.12[40767]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:12 managed-node2 python3.12[40923]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:13:13 managed-node2 python3.12[41080]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:14 managed-node2 python3.12[41236]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:14 managed-node2 python3.12[41391]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:15 managed-node2 podman[41576]: 2025-07-07 20:13:15.556653463 -0400 EDT m=+0.319976011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:13:15 managed-node2 python3.12[41746]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:16 managed-node2 python3.12[41903]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:16 managed-node2 python3.12[42058]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:17 managed-node2 python3.12[42136]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=.cnp2b107 recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:17 managed-node2 
podman[42298]: 2025-07-07 20:13:17.610100559 -0400 EDT m=+0.016705745 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:13:17 managed-node2 systemd[1]: Created slice machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice - cgroup machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2432.\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Initializing event backend journald\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug 
msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice for parent machine.slice and name libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Shutting down engines\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=42298\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 07 20:13:18 managed-node2 python3.12[42459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:20 managed-node2 python3.12[42616]: ansible-ansible.legacy.command Invoked 
with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:20 managed-node2 python3.12[42772]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:21 managed-node2 python3.12[42927]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:22 managed-node2 podman[43112]: 2025-07-07 20:13:22.329599025 -0400 EDT m=+0.443555601 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:13:22 managed-node2 python3.12[43281]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:23 managed-node2 python3.12[43438]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:23 managed-node2 python3.12[43593]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:24 managed-node2 python3.12[43671]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=.garaxq8q recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:24 managed-node2 python3.12[43826]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:24 managed-node2 podman[43834]: 2025-07-07 20:13:24.584154544 -0400 EDT m=+0.014632770 network create 
51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:13:24 managed-node2 systemd[1]: Created slice machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice - cgroup machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2438.\nJul 07 20:13:25 managed-node2 sudo[44045]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fkixybyzrrzuqcjcfmgozxwfwmajookd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933605.3310475-16168-26421251595513/AnsiballZ_command.py'\nJul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:25 managed-node2 python3.12[44048]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:25 managed-node2 systemd[27434]: Started podman-44056.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 148.\nJul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:26 managed-node2 python3.12[44220]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node2 python3.12[44383]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node2 sudo[44596]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ydnvtlhbpgwizyiplpnqjwanppjzhlbg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933606.7243044-16231-16521388663911/AnsiballZ_command.py'\nJul 07 20:13:26 managed-node2 sudo[44596]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:27 managed-node2 python3.12[44599]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 
20:13:27 managed-node2 sudo[44596]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:27 managed-node2 python3.12[44757]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:27 managed-node2 python3.12[44915]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:28 managed-node2 python3.12[45073]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:28 managed-node2 python3.12[45229]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:29 managed-node2 python3.12[45385]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:31 managed-node2 python3.12[45696]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:32 managed-node2 python3.12[45857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:13:36 managed-node2 python3.12[46014]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:13:36 managed-node2 python3.12[46170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:37 managed-node2 python3.12[46327]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:37 managed-node2 python3.12[46483]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:38 managed-node2 python3.12[46639]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:39 managed-node2 python3.12[46795]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node2 sudo[47002]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cdbeefpvsgoajxqmxzaiihkscmhhxllh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933619.5337389-16868-45510891100291/AnsiballZ_systemd.py'\nJul 07 20:13:39 managed-node2 sudo[47002]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:40 managed-node2 python3.12[47005]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:40 managed-node2 systemd[27434]: Reload requested from client PID 47008 ('systemctl')...\nJul 07 20:13:40 managed-node2 systemd[27434]: Reloading...\nJul 07 20:13:40 managed-node2 systemd[27434]: Reloading finished in 62 ms.\nJul 07 20:13:40 managed-node2 systemd[27434]: Stopping podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 152.\nJul 07 20:13:50 managed-node2 podman[47019]: time=\"2025-07-07T20:13:50-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered 
disabled state\nJul 07 20:13:50 managed-node2 systemd[27434]: Removed slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 153 and the job result is done.\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 podman[47019]: Pods stopped:\nJul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:13:50 managed-node2 podman[47019]: Pods removed:\nJul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:13:50 managed-node2 podman[47019]: Secrets removed:\nJul 07 20:13:50 managed-node2 podman[47019]: Volumes removed:\nJul 07 20:13:50 managed-node2 systemd[27434]: Stopped podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 152 and the job result is done.\nJul 07 20:13:50 managed-node2 systemd[27434]: podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service: Consumed 589ms CPU time, 74.3M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nJul 07 20:13:50 managed-node2 sudo[47002]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:50 managed-node2 python3.12[47222]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:51 managed-node2 sudo[47429]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ragjadshxwcsmhyabehcrpjvrodecwop ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933631.1425395-17190-192702739809308/AnsiballZ_podman_play.py'\nJul 07 
20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play version: 5.5.1, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:51 managed-node2 systemd[27434]: Started podman-47439.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 154.\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:52 managed-node2 python3.12[47600]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node2 python3.12[47755]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:53 managed-node2 python3.12[47911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:54 managed-node2 python3.12[48068]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:55 managed-node2 python3.12[48224]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:55 managed-node2 systemd[1]: Reload requested from 
client PID 48227 ('systemctl') (unit session-5.scope)...\nJul 07 20:13:55 managed-node2 systemd[1]: Reloading...\nJul 07 20:13:55 managed-node2 systemd-rc-local-generator[48270]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:13:55 managed-node2 systemd[1]: Reloading finished in 218 ms.\nJul 07 20:13:55 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2445.\nJul 07 20:13:56 managed-node2 podman[48286]: 2025-07-07 20:13:56.040685373 -0400 EDT m=+0.023115658 pod stop 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:14:06 managed-node2 podman[48286]: time=\"2025-07-07T20:14:06-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.074195184 -0400 EDT m=+10.056625730 container died 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.11235352 -0400 EDT m=+10.094783772 container cleanup 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.131606724 -0400 EDT m=+10.114044140 container died 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:14:06 managed-node2 systemd[1]: run-netns-netns\\x2d82249bc3\\x2db382\\x2d7b9a\\x2d81bc\\x2d86e8308d188a.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d82249bc3\\x2db382\\x2d7b9a\\x2d81bc\\x2d86e8308d188a.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.202757575 -0400 EDT m=+10.185187931 container cleanup 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: Removed slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2446 and the job result is done.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 
20:14:06.23022593 -0400 EDT m=+10.212656217 container remove 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.25693135 -0400 EDT m=+10.239361641 container remove 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: Failed to open /run/systemd/transient/machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: No such file or directory\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.265332892 -0400 EDT m=+10.247763153 pod remove 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.268022285 -0400 EDT m=+10.250452816 container kill 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.274842916 -0400 EDT m=+10.257273277 container died 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.336039419 -0400 EDT m=+10.318469707 container remove 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 podman[48286]: Pods stopped:\nJul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:14:06 managed-node2 podman[48286]: Pods removed:\nJul 07 20:14:06 managed-node2 
podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:14:06 managed-node2 podman[48286]: Secrets removed:\nJul 07 20:14:06 managed-node2 podman[48286]: Volumes removed:\nJul 07 20:14:06 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2445 and the job result is done.\nJul 07 20:14:06 managed-node2 python3.12[48488]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:14:07 managed-node2 python3.12[48813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node2 python3.12[48969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node2 python3.12[49126]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:10 managed-node2 python3.12[49282]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:14:10 managed-node2 systemd[1]: Reload requested from client PID 49285 ('systemctl') (unit session-5.scope)...\nJul 07 20:14:10 managed-node2 systemd[1]: Reloading...\nJul 07 20:14:11 managed-node2 systemd-rc-local-generator[49327]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:14:11 managed-node2 systemd[1]: Reloading finished in 211 ms.\nJul 07 20:14:11 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2447.\nJul 07 20:14:11 managed-node2 podman[49344]: 2025-07-07 20:14:11.206854573 -0400 EDT m=+0.022831781 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 podman[49344]: time=\"2025-07-07T20:14:21-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:14:21 managed-node2 systemd[1]: libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.240228718 -0400 EDT m=+10.056206048 container died 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test)\nJul 07 20:14:21 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.280265572 -0400 EDT m=+10.096242780 container cleanup 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:14:21 managed-node2 systemd[1]: libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.298799022 -0400 EDT m=+10.114776654 container died afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: run-p32110-i32410.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p32110-i32410.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:14:21 managed-node2 NetworkManager[714]: [1751933661.3375] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2448.\nJul 07 20:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager 
Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2448.\nJul 07 20:14:21 managed-node2 systemd[1]: run-netns-netns\\x2dd8d23001\\x2dccdd\\x2d98d5\\x2d0185\\x2d01ce80e8c916.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2dd8d23001\\x2dccdd\\x2d98d5\\x2d0185\\x2d01ce80e8c916.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.414963852 -0400 EDT m=+10.230941061 container cleanup afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: Removed slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2527 and the job result is done.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.422214439 -0400 EDT m=+10.238191678 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.428641853 -0400 EDT m=+10.244619177 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.436145908 -0400 EDT 
m=+10.252123291 container kill f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.451967497 -0400 EDT m=+10.267945068 container died f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.478053211 -0400 EDT m=+10.294030464 container remove 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.504395032 -0400 EDT m=+10.320372288 container remove afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.513441882 -0400 EDT m=+10.329419091 pod remove f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 podman[49397]: 2025-07-07 20:14:21.533507126 -0400 EDT m=+0.087614608 container cleanup f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.560230359 -0400 EDT m=+10.376207602 container remove f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: Pods stopped:\nJul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:14:21 managed-node2 podman[49344]: Pods removed:\nJul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:14:21 managed-node2 podman[49344]: Secrets removed:\nJul 07 20:14:21 managed-node2 podman[49344]: Volumes removed:\nJul 07 20:14:21 managed-node2 systemd[1]: 
podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2447 and the job result is done.\nJul 07 20:14:21 managed-node2 python3.12[49562]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None 
username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 python3.12[49888]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node2 python3.12[50043]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 07 20:14:24 managed-node2 python3.12[50199]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:24 managed-node2 sudo[50407]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpalnwprgspnzsnsvsnylrerubrespev ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.3660543-18219-69893342173436/AnsiballZ_podman_container_info.py'\nJul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:24 managed-node2 python3.12[50410]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 07 20:14:24 managed-node2 systemd[27434]: Started podman-50411.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 158.\nJul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:25 managed-node2 sudo[50623]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lygojmlyopjnwoxfpskxczdponstloyj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.983715-18245-114379203266243/AnsiballZ_command.py'\nJul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:25 managed-node2 python3.12[50626]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50627.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 162.\nJul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:25 managed-node2 sudo[50839]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iwlmlwzhyduyhuxfusqoosixqzgzqvsl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933665.559254-18262-213663586865707/AnsiballZ_command.py'\nJul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:25 managed-node2 python3.12[50842]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50843.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 166.\nJul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:26 managed-node2 python3.12[51004]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 07 20:14:26 managed-node2 systemd[1]: Stopping user@3001.service - User Manager for UID 3001...\n\u2591\u2591 Subject: A stop job for unit user@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2529.\nJul 07 20:14:26 managed-node2 systemd[27434]: Activating special unit exit.target...\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopping podman-pause-8c2d7b35.scope...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 177.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app-podman\\x2dkube.slice - Slice /app/podman-kube.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 181 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: app-podman\\x2dkube.slice: Consumed 589ms CPU time, 74.4M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated 
resources.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 178 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target default.target - Main User Target.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 191 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target basic.target - Basic System.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 190 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target paths.target - Paths.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 184 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target sockets.target - Sockets.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 187 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target timers.target - Timers.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 189 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 186 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 185 and the job result is done.\nJul 07 20:14:26 managed-node2 dbus-broker[27991]: Dispatched 2275 messages @ 3(\u00b113)\u03bcs / message.\n\u2591\u2591 Subject: Dispatched 2275 
messages\n\u2591\u2591 Defined-By: dbus-broker\n\u2591\u2591 Support: https://groups.google.com/forum/#!forum/bus1-devel\n\u2591\u2591 \n\u2591\u2591 This message is printed by dbus-broker when shutting down. It includes metric\n\u2591\u2591 information collected during the runtime of dbus-broker.\n\u2591\u2591 \n\u2591\u2591 The message lists the number of dispatched messages\n\u2591\u2591 (in this case 2275) as well as the mean time to\n\u2591\u2591 handling a single message. The time measurements exclude the time spent on\n\u2591\u2591 writing to and reading from the kernel.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopping dbus-broker.service - D-Bus User Message Bus...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 174.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 179 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped podman-pause-8c2d7b35.scope.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 177 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user.slice - Slice /user.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 176 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped dbus-broker.service - D-Bus User Message Bus.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 174 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice session.slice - User Core Session Slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 175 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Closed dbus.socket - D-Bus User Message Bus Socket.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 180 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app.slice - User Application Slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has 
finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 182 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: app.slice: Consumed 615ms CPU time, 74.5M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 systemd[27434]: Reached target shutdown.target - Shutdown.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 173.\nJul 07 20:14:26 managed-node2 systemd[27434]: Finished systemd-exit.service - Exit the Session.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 171.\nJul 07 20:14:26 managed-node2 systemd[27434]: Reached target exit.target - Exit the Session.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 170.\nJul 07 20:14:26 managed-node2 systemd-logind[659]: Removed session 6.\n\u2591\u2591 Subject: Session 6 has been terminated\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: sd-login(3)\n\u2591\u2591 \n\u2591\u2591 A session with the ID 6 has been terminated.\nJul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user@3001.service has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopped user@3001.service - User Manager for UID 3001.\n\u2591\u2591 Subject: A stop job for unit user@3001.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user@3001.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2529 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Consumed 2.005s CPU time, 92.7M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user@3001.service completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopping user-runtime-dir@3001.service - User Runtime Directory /run/user/3001...\n\u2591\u2591 Subject: A stop job for unit user-runtime-dir@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 
\n\u2591\u2591 A stop job for unit user-runtime-dir@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2528.\nJul 07 20:14:26 managed-node2 systemd[1]: run-user-3001.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: user-runtime-dir@3001.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopped user-runtime-dir@3001.service - User Runtime Directory /run/user/3001.\n\u2591\u2591 Subject: A stop job for unit user-runtime-dir@3001.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user-runtime-dir@3001.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2528 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: Removed slice user-3001.slice - User Slice of UID 3001.\n\u2591\u2591 Subject: A stop job for unit user-3001.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user-3001.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2530 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: user-3001.slice: Consumed 2.031s CPU time, 92.8M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user-3001.slice completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 python3.12[51165]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:29 managed-node2 python3.12[51321]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:14:31 managed-node2 python3.12[51478]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:33 managed-node2 python3.12[51634]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False 
expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:36 managed-node2 python3.12[51790]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:38 managed-node2 python3.12[51946]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:39 managed-node2 sudo[52152]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfrixhsqpnpoemptjbnyouggwynzakes ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933679.2995527-18658-251091507513885/AnsiballZ_command.py'\nJul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:39 managed-node2 python3.12[52155]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:40 managed-node2 python3.12[52317]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:40 managed-node2 python3.12[52479]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd3 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:40 managed-node2 sudo[52692]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tblibtrtarfdedjsuypqezvumnboznzr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933680.662618-18720-8327876079082/AnsiballZ_command.py'\nJul 07 20:14:40 managed-node2 sudo[52692]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:40 managed-node2 python3.12[52695]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ 
]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:41 managed-node2 sudo[52692]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:41 managed-node2 python3.12[52853]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:41 managed-node2 python3.12[53011]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:42 managed-node2 python3.12[53169]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:44 managed-node2 python3.12[53479]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:45 managed-node2 python3.12[53640]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:45 managed-node2 python3.12[53796]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:48 managed-node2 python3.12[53953]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:14:48 managed-node2 python3.12[54109]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:49 managed-node2 python3.12[54266]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:49 managed-node2 python3.12[54422]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:50 managed-node2 python3.12[54578]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:51 managed-node2 python3.12[54734]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:51 managed-node2 python3.12[54889]: 
ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:52 managed-node2 python3.12[55044]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:53 managed-node2 python3.12[55199]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:53 managed-node2 python3.12[55355]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:55 managed-node2 python3.12[55512]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:56 managed-node2 python3.12[55668]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:14:56 managed-node2 python3.12[55825]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:56 managed-node2 python3.12[55980]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:58 managed-node2 python3.12[56135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:59 managed-node2 python3.12[56292]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:00 managed-node2 python3.12[56448]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:15:00 managed-node2 python3.12[56605]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:01 managed-node2 python3.12[56760]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True 
modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:02 managed-node2 python3.12[56915]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 07 20:15:02 managed-node2 python3.12[57071]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:03 managed-node2 python3.12[57226]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:04 managed-node2 python3.12[57381]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:06 managed-node2 python3.12[57587]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:07 managed-node2 python3.12[57771]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:07 managed-node2 python3.12[57926]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:09 managed-node2 python3.12[58236]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:10 managed-node2 python3.12[58398]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:10 managed-node2 python3.12[58554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:12 managed-node2 python3.12[58711]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:13 managed-node2 python3.12[58868]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:14 managed-node2 python3.12[59023]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:14 
managed-node2 python3.12[59148]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.0353012-19840-138139497644559/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:15 managed-node2 python3.12[59303]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:15 managed-node2 systemd[1]: Reload requested from client PID 59304 ('systemctl') (unit session-5.scope)...\nJul 07 20:15:15 managed-node2 systemd[1]: Reloading...\nJul 07 20:15:15 managed-node2 systemd-rc-local-generator[59347]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:15:15 managed-node2 systemd[1]: Reloading finished in 191 ms.\nJul 07 20:15:16 managed-node2 python3.12[59512]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:16 managed-node2 systemd[1]: Starting quadlet-pod-pod-pod.service...\n\u2591\u2591 Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-pod-pod.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2532.\nJul 07 20:15:16 managed-node2 systemd[1]: Created slice machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice - cgroup machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2616.\nJul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.265732025 -0400 EDT m=+0.061820196 container create 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.272319983 -0400 EDT m=+0.068408122 pod create fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod)\nJul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59516]: fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519\nJul 07 20:15:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:16 managed-node2 
NetworkManager[714]: [1751933716.3361] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:15:16 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:15:16 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3447] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10)\nJul 07 20:15:16 managed-node2 (udev-worker)[59536]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:15:16 managed-node2 (udev-worker)[59537]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered forwarding state\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3546] device (veth0): carrier: link connected\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3548] device (podman0): carrier: link connected\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3683] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3701] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3707] device (podman0): Activation: starting connection 'podman0' (845713e4-a167-41fd-b540-293fb4a7aacd)\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3708] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3710] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3712] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3714] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2622.\nJul 07 20:15:16 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2622.\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4101] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 
'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4105] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4116] device (podman0): Activation: successful, device activated.\nJul 07 20:15:16 managed-node2 systemd[1]: Started libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2701.\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.467418922 -0400 EDT m=+0.168638968 container init 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.469862017 -0400 EDT m=+0.171082040 container start 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.47596405 -0400 EDT m=+0.177184035 pod start fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod)\nJul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59525]: quadlet-pod\nJul 07 20:15:16 managed-node2 systemd[1]: Started quadlet-pod-pod-pod.service.\n\u2591\u2591 Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-pod-pod.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2532.\nJul 07 20:15:17 managed-node2 python3.12[59737]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 
'dead' state.\nJul 07 20:15:18 managed-node2 podman[59923]: 2025-07-07 20:15:18.970989919 -0400 EDT m=+0.333232348 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:15:19 managed-node2 python3.12[60094]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:19 managed-node2 python3.12[60249]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:20 managed-node2 python3.12[60374]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933719.570603-19951-98699047571118/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:20 managed-node2 python3.12[60529]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:20 managed-node2 systemd[1]: Reload requested from client PID 60530 ('systemctl') (unit session-5.scope)...\nJul 07 20:15:20 managed-node2 systemd[1]: Reloading...\nJul 07 20:15:20 managed-node2 systemd-rc-local-generator[60574]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:15:20 managed-node2 systemd[1]: Reloading finished in 199 ms.\nJul 07 20:15:21 managed-node2 python3.12[60739]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:21 managed-node2 systemd[1]: Starting quadlet-pod-container.service...\n\u2591\u2591 Subject: A start job for unit quadlet-pod-container.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-container.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2708.\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.598559882 -0400 EDT m=+0.046043355 container create 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service)\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.641059859 -0400 EDT m=+0.088543485 container init 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z)\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.643379876 -0400 EDT m=+0.090863506 container start 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:15:21 managed-node2 systemd[1]: Started quadlet-pod-container.service.\n\u2591\u2591 Subject: A start job for unit quadlet-pod-container.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-container.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2708.\nJul 07 20:15:21 managed-node2 quadlet-pod-container[60743]: 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.576103282 -0400 EDT m=+0.023586939 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:15:22 managed-node2 python3.12[60912]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:23 managed-node2 python3.12[61068]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:24 managed-node2 python3.12[61224]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}}\n {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:25 managed-node2 python3.12[61388]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:15:25 managed-node2 useradd[61390]: new 
group: name=user_quadlet_pod, GID=2223\nJul 07 20:15:25 managed-node2 useradd[61390]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0\nJul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ]\nJul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ]\nJul 07 20:15:26 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:15:27 managed-node2 python3.12[61703]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:28 managed-node2 python3.12[61866]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None\nJul 07 20:15:28 managed-node2 python3.12[62022]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:29 managed-node2 python3.12[62180]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:29 managed-node2 python3.12[62336]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:31 managed-node2 python3.12[62492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:32 managed-node2 python3.12[62649]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:32 managed-node2 python3.12[62805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:33 managed-node2 python3.12[62961]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:15:33 managed-node2 systemd[1]: Created slice user-2223.slice - User Slice of UID 2223.\n\u2591\u2591 Subject: A start job for unit user-2223.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-2223.slice has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2871.\nJul 07 20:15:33 managed-node2 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223...\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@2223.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@2223.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2793.\nJul 07 20:15:34 managed-node2 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223.\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@2223.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@2223.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2793.\nJul 07 20:15:34 managed-node2 systemd[1]: Starting user@2223.service - User Manager for UID 2223...\n\u2591\u2591 Subject: A start job for unit user@2223.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@2223.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2873.\nJul 07 20:15:34 managed-node2 systemd-logind[659]: New session 7 of user user_quadlet_pod.\n\u2591\u2591 Subject: A new session 7 has been created for user user_quadlet_pod\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: sd-login(3)\n\u2591\u2591 \n\u2591\u2591 A new session with the ID 7 has been created for the user user_quadlet_pod.\n\u2591\u2591 \n\u2591\u2591 The leading process of the session is 62973.\nJul 07 20:15:34 managed-node2 (systemd)[62973]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0)\nJul 07 20:15:34 managed-node2 systemd[62973]: Queued start job for default target default.target.\nJul 07 20:15:34 managed-node2 systemd[62973]: Created slice app.slice - User Application Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5.\nJul 07 20:15:34 managed-node2 systemd[62973]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 9.\nJul 07 20:15:34 managed-node2 systemd[62973]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 10.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target paths.target - 
Paths.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 11.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target timers.target - Timers.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 8.\nJul 07 20:15:34 managed-node2 systemd[62973]: Starting dbus.socket - D-Bus User Message Bus Socket...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4.\nJul 07 20:15:34 managed-node2 systemd[62973]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 12.\nJul 07 20:15:34 managed-node2 systemd[62973]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 12.\nJul 07 20:15:34 managed-node2 systemd[62973]: Listening on dbus.socket - D-Bus User Message Bus Socket.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target sockets.target - Sockets.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target basic.target - Basic System.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target default.target - Main User Target.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1.\nJul 07 20:15:34 managed-node2 
systemd[62973]: Startup finished in 74ms.\n\u2591\u2591 Subject: User manager start-up is now complete\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The user manager instance for user 2223 has been started. All services queued\n\u2591\u2591 for starting have been started. Note that other services might still be starting\n\u2591\u2591 up or be started at any later time.\n\u2591\u2591 \n\u2591\u2591 Startup of the manager took 74648 microseconds.\nJul 07 20:15:34 managed-node2 systemd[1]: Started user@2223.service - User Manager for UID 2223.\n\u2591\u2591 Subject: A start job for unit user@2223.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@2223.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2873.\nJul 07 20:15:34 managed-node2 python3.12[63135]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:34 managed-node2 python3.12[63290]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:35 managed-node2 python3.12[63415]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933734.666246-20447-234683972003494/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:35 managed-node2 sudo[63620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwdxpsyaqgiwosrsmcnobsvzckxuxltr ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933735.4058456-20475-145399815936558/AnsiballZ_systemd.py'\nJul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. 
To avoid this, create the remote_tmp dir with the correct permissions manually\nJul 07 20:15:35 managed-node2 systemd[62973]: Reload requested from client PID 63624 ('systemctl')...\nJul 07 20:15:35 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:35 managed-node2 systemd[62973]: Reloading finished in 42 ms.\nJul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:36 managed-node2 sudo[63839]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofgmoaezuncvsjzylzfbwkllzxyiawqa ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933736.0668013-20497-124358191920012/AnsiballZ_systemd.py'\nJul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:36 managed-node2 python3.12[63842]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:36 managed-node2 systemd[62973]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user....\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 25.\nJul 07 20:15:36 managed-node2 sh[63846]: active\nJul 07 20:15:36 managed-node2 systemd[62973]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user..\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 25.\nJul 07 20:15:36 managed-node2 systemd[62973]: Starting quadlet-pod-pod-pod.service...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice session.slice - User Core Session Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 27.\nJul 07 20:15:36 managed-node2 systemd[62973]: Starting dbus-broker.service - D-Bus User Message Bus...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 26.\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and 
ignored\nJul 07 20:15:36 managed-node2 systemd[62973]: Started dbus-broker.service - D-Bus User Message Bus.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 26.\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Ready\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice user.slice - Slice /user.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 33.\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 32.\nJul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63855]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de\nJul 07 20:15:36 managed-node2 systemd[62973]: Started podman-pause-569872be.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 36.\nJul 07 20:15:36 managed-node2 systemd[62973]: Started libpod-31f1b98498c312272d594cc143121f0c4e208b416f5b06370302a3ade84678f0.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 40.\nJul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63874]: quadlet-pod\nJul 07 20:15:36 managed-node2 systemd[62973]: Started quadlet-pod-pod-pod.service.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:37 managed-node2 python3.12[64055]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:38 managed-node2 python3.12[64212]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:38 managed-node2 python3.12[64368]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g 
user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:39 managed-node2 python3.12[64524]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:15:40 managed-node2 sudo[64729]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opzncrxhlgpablgicvdgsyjydbaatunc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933740.2555504-20657-253835226843587/AnsiballZ_podman_image.py'\nJul 07 20:15:40 managed-node2 sudo[64729]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64733.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 45.\nJul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64740.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 49.\nJul 07 20:15:41 managed-node2 systemd[62973]: Started podman-64765.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 53.\nJul 07 20:15:41 managed-node2 sudo[64729]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:41 managed-node2 python3.12[64927]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:42 managed-node2 python3.12[65082]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:42 managed-node2 python3.12[65207]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933742.1096382-20721-81203128614920/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None 
selevel=None setype=None attributes=None\nJul 07 20:15:43 managed-node2 sudo[65412]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dradgtbmatpvlibzybrbrgncbuvsbmla ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933742.8958044-20756-33448022762234/AnsiballZ_systemd.py'\nJul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:43 managed-node2 python3.12[65415]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:43 managed-node2 systemd[62973]: Reload requested from client PID 65416 ('systemctl')...\nJul 07 20:15:43 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:43 managed-node2 systemd[62973]: Reloading finished in 50 ms.\nJul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:43 managed-node2 sudo[65630]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypfsehmisorrgovabzrcscgtxcipauhh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933743.5809875-20783-151961798381358/AnsiballZ_systemd.py'\nJul 07 20:15:43 managed-node2 sudo[65630]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:44 managed-node2 python3.12[65633]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:44 managed-node2 systemd[62973]: Starting quadlet-pod-container.service...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 57.\nJul 07 20:15:44 managed-node2 systemd[62973]: Started quadlet-pod-container.service.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 57.\nJul 07 20:15:44 managed-node2 quadlet-pod-container[65636]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748\nJul 07 20:15:44 managed-node2 sudo[65630]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:44 managed-node2 python3.12[65805]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 python3.12[65961]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 sudo[66167]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo 
BECOME-SUCCESS-zkgmctvpahwcnyvziosokhuvkvmstaqp ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933745.1806111-20829-116188117400154/AnsiballZ_command.py'\nJul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:45 managed-node2 python3.12[66170]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}}\n {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 systemd[62973]: Started podman-66171.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 71.\nJul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:46 managed-node2 python3.12[66333]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:47 managed-node2 python3.12[66645]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:48 managed-node2 python3.12[66806]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:49 managed-node2 python3.12[66963]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:49 managed-node2 python3.12[67119]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:51 managed-node2 python3.12[67275]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:51 managed-node2 python3.12[67432]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:52 managed-node2 python3.12[67588]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:52 managed-node2 python3.12[67744]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:53 managed-node2 sudo[67951]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo 
BECOME-SUCCESS-qbrxmfhyqxuzmhgzlevjndrxencddmhb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933753.0681589-21169-130280143861349/AnsiballZ_systemd.py'\nJul 07 20:15:53 managed-node2 sudo[67951]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:53 managed-node2 python3.12[67954]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:15:53 managed-node2 systemd[62973]: Reload requested from client PID 67957 ('systemctl')...\nJul 07 20:15:53 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:53 managed-node2 systemd[62973]: Reloading finished in 49 ms.\nJul 07 20:15:53 managed-node2 systemd[62973]: Stopping quadlet-pod-container.service...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 75.\nJul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: time=\"2025-07-07T20:16:03-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL\"\nJul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748\nJul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a\n\u2591\u2591 Subject: Unit process exited\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 An ExecStart= process belonging to unit UNIT has exited.\n\u2591\u2591 \n\u2591\u2591 The process' exit code is 'exited' and its exit status is 137.\nJul 07 20:16:03 managed-node2 systemd[62973]: Removed slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 76 and the job result is done.\nJul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68000]: quadlet-pod\nJul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Failed with result 'exit-code'.\n\u2591\u2591 Subject: Unit failed\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT has entered the 'failed' state with result 'exit-code'.\nJul 07 20:16:03 managed-node2 systemd[62973]: Stopped 
quadlet-pod-container.service.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 75 and the job result is done.\nJul 07 20:16:03 managed-node2 sudo[67951]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68018]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de\nJul 07 20:16:04 managed-node2 python3.12[68183]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:05 managed-node2 python3.12[68495]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n systemctl list-unit-files | grep quadlet\n ls -alrtF /etc/containers/systemd\n /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:06 managed-node2 python3.12[68705]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:06 managed-node2 python3.12[68861]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:151" }, { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:16:15.058876+00:00Z", "host": "managed-node2", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-08T00:16:15.040602+00:00Z", "task_name": "Parse quadlet file", "task_path": "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12" }, { "ansible_version": "2.17.12", "delta": "0:00:00.030213", "end_time": "2025-07-07 20:16:15.452719", "host": "managed-node2", "message": "", "rc": 0, "start_time": "2025-07-07 20:16:15.422506", "stdout": "Jul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: 
time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Initializing event backend file\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: 
time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d)\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:11:58 managed-node2 /usr/bin/podman[29568]: time=\"2025-07-07T20:11:58-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=29568\nJul 07 20:11:58 managed-node2 systemd[27434]: Stopped libpod-conmon-82629828a852767ad0da38b9b0d05a725f4b41945d1a99dd832d9a13e1ccd23d.scope.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 85 and the job result is done.\nJul 07 20:11:58 managed-node2 systemd[27434]: Removed slice user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice - cgroup user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 84 and the job result is done.\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 systemd[27434]: user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824.slice: No such file or directory\nJul 07 20:11:58 managed-node2 podman[29546]: Pods stopped:\nJul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824\nJul 07 20:11:58 managed-node2 podman[29546]: Pods removed:\nJul 07 20:11:58 managed-node2 podman[29546]: 033c250dc2a1ab4f51457cd147432bce056f88f712918a995cea1173eff8c824\nJul 07 20:11:58 managed-node2 podman[29546]: Secrets removed:\nJul 07 20:11:58 managed-node2 podman[29546]: Volumes 
removed:\nJul 07 20:11:58 managed-node2 systemd[27434]: Created slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 86.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-b68ee141dcf27814664a590d406043e669f2802be350ecb975174d3342b509fb.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 90.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started rootless-netns-e8ce431d.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 94.\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:11:58 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:11:58 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:11:58 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:11:58 managed-node2 systemd[27434]: Started run-p29607-i29907.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 98.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-7f0ec0a3585717aa5dc0675fd2d3f2fb67af120ad764f3608d3ed615db7ae81d.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 102.\nJul 07 20:11:58 managed-node2 systemd[27434]: Started libpod-d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 107.\nJul 07 20:11:58 managed-node2 podman[29546]: Pod:\nJul 07 20:11:58 managed-node2 podman[29546]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:11:58 managed-node2 podman[29546]: Container:\nJul 07 20:11:58 managed-node2 
podman[29546]: d63f16e390f387a3579fe8190c8b955ae0c9b5f12e78ccbea11550f14575c651\nJul 07 20:11:58 managed-node2 systemd[27434]: Started podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 71.\nJul 07 20:11:58 managed-node2 sudo[29540]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:11:59 managed-node2 python3.12[29790]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:11:59 managed-node2 python3.12[29946]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:01 managed-node2 python3.12[30103]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:02 managed-node2 python3.12[30259]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:02 managed-node2 python3.12[30414]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:03 managed-node2 podman[30600]: 2025-07-07 20:12:03.454853091 -0400 EDT m=+0.369862355 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:03 managed-node2 python3.12[30769]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:03 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:04 managed-node2 python3.12[30924]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:04 managed-node2 python3.12[31079]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:12:05 managed-node2 python3.12[31204]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd2.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933524.4345405-13159-278912456486895/.source.yml _original_basename=.wig720_j follow=False checksum=b4126723a3845d354fb7beda3b3f44919cb02dd7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.511414005 -0400 EDT m=+0.019368086 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:05 managed-node2 systemd[1]: Created slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1968.\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.55953636 -0400 EDT m=+0.067490307 container create a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.565742112 -0400 EDT m=+0.073696026 pod create 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:05 
managed-node2 podman[31367]: 2025-07-07 20:12:05.592586777 -0400 EDT m=+0.100540709 container create f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.567663828 -0400 EDT m=+0.075617914 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:05 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:12:05 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6188] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/3)\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6207] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4)\nJul 07 20:12:05 managed-node2 (udev-worker)[31379]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:05 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:12:05 managed-node2 (udev-worker)[31378]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6283] device (veth0): carrier: link connected\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6285] device (podman1): carrier: link connected\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6369] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6382] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6387] device (podman1): Activation: starting connection 'podman1' (1839a03b-e916-4e56-ad20-699cf8b9a55a)\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6389] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6402] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6406] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.6413] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The 
job identifier is 1974.\nJul 07 20:12:05 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1974.\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7043] device (podman1): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7046] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:12:05 managed-node2 NetworkManager[714]: [1751933525.7051] device (podman1): Activation: successful, device activated.\nJul 07 20:12:05 managed-node2 systemd[1]: Started run-p31412-i31712.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p31412-i31712.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p31412-i31712.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2053.\nJul 07 20:12:05 managed-node2 aardvark-dns[31412]: starting aardvark on a child with pid 31419\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Successfully parsed config\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v4 ip {\"podman-default-kube-network\": [10.89.0.1]}\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Listen v6 ip {}\nJul 07 20:12:05 managed-node2 aardvark-dns[31419]: Using the following upstream servers: [10.29.169.13:53, 10.29.170.12:53, 10.2.32.1:53]\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2059.\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/12/attach}\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : terminal_ctrl_fd: 12\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : winsz read side: 16, winsz write side: 17\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 
2066.\nJul 07 20:12:05 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : container PID: 31426\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.796580679 -0400 EDT m=+0.304534648 container init a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.799350929 -0400 EDT m=+0.307304949 container start a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2073.\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/11/attach}\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : terminal_ctrl_fd: 11\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : winsz read side: 15, winsz write side: 16\nJul 07 20:12:05 managed-node2 systemd[1]: Started libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2080.\nJul 07 20:12:05 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container PID: 31431\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.854777496 -0400 EDT m=+0.362731459 container init f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.857558887 -0400 EDT m=+0.365512915 container start f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:05 managed-node2 podman[31367]: 2025-07-07 20:12:05.863542588 -0400 EDT m=+0.371496538 pod start 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play 
PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pod:\n 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\n Container:\n f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\n \nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Initializing event backend journald\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"no command or entrypoint provided, and no CMD or ENTRYPOINT from image: defaulting to empty string\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"setting container name 2dbe050d31da-infra\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Allocated lock 1 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Cached value indicated that idmapped mounts for overlay are supported\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\\\" has run directory \\\"/run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata\\\"\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pulling image quay.io/libpod/testimage:20210610 (policy: missing)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying \\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Looking up image \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Normalized platform linux/amd64 to {amd64 linux [] }\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Trying 
\\\"quay.io/libpod/testimage:20210610\\\" ...\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"parsed reference into \\\"[overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Found image \\\"quay.io/libpod/testimage:20210610\\\" as \\\"quay.io/libpod/testimage:20210610\\\" in local containers storage ([overlay@/var/lib/containers/storage+/run/containers/storage:overlay.mountopt=nodev,metacopy=on]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"using systemd mode: false\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"adding container to pod httpd2\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"setting container name httpd2-httpd2\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Loading seccomp profile from \\\"/usr/share/containers/seccomp.json\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /proc\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev/pts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /dev/mqueue\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /sys\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding mount /sys/fs/cgroup\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Allocated lock 2 for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"exporting opaque data as blob \\\"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" has work directory \\\"/var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\\\"\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" has run directory \\\"/run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Strongconnecting node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pushed a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 onto stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Finishing node a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307. Popped a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 off stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Strongconnecting node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Pushed f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b onto stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Finishing node f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b. Popped f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b off stack\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Made network namespace at /run/netns/netns-1ce7c5df-883b-4dd4-e4c5-5e8b3ad8b1f6 for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created root filesystem for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\"\n [DEBUG netavark::network::validation] Validating network namespace...\n [DEBUG netavark::commands::setup] Setting up...\n [INFO netavark::firewall] Using nftables firewall driver\n [DEBUG netavark::network::bridge] Setup network podman-default-kube-network\n [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24]\n [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24]\n [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1\n [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2\n [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100)\n [DEBUG netavark::firewall::firewalld] Adding firewalld rules for network 10.89.0.0/24\n [DEBUG netavark::firewall::firewalld] Adding subnet 10.89.0.0/24 to zone trusted as source\n [INFO netavark::firewall::nft] Creating container chain nv_51bfc940_10_89_0_0_nm24\n [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1\n [DEBUG netavark::dns::aardvark] Spawning aardvark server\n [DEBUG netavark::dns::aardvark] start aardvark-dns: [\"systemd-run\", \"-q\", \"--scope\", \"/usr/libexec/podman/aardvark-dns\", \"--config\", \"/run/containers/networks/aardvark-dns\", \"-p\", \"53\", \"run\"]\n [DEBUG netavark::commands::setup] {\n 
\"podman-default-kube-network\": StatusBlock {\n dns_search_domains: Some(\n [\n \"dns.podman\",\n ],\n ),\n dns_server_ips: Some(\n [\n 10.89.0.1,\n ],\n ),\n interfaces: Some(\n {\n \"eth0\": NetInterface {\n mac_address: \"ce:aa:8c:aa:62:92\",\n subnets: Some(\n [\n NetAddress {\n gateway: Some(\n 10.89.0.1,\n ),\n ipnet: 10.89.0.2/24,\n },\n ],\n ),\n },\n },\n ),\n },\n }\n [DEBUG netavark::commands::setup] Setup complete\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Setting Cgroups for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Workdir \\\"/\\\" resolved to host path \\\"/var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/rootfs/merge\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created OCI spec for container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 at /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/config.json\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -u a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata -p /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/pidfile -n 2dbe050d31da-infra --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg 
--exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307]\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Received: 31426\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Got Conmon PID as 31424\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 in OCI runtime\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding nameserver(s) from network status of '[\\\"10.89.0.1\\\"]'\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Adding search domain(s) from network status of '[\\\"dns.podman\\\"]'\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Starting container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 with command [/catatonit -P]\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Started container a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"overlay: mount_data=lowerdir=/var/lib/containers/storage/overlay/l/66KUKQ3YMIWXUMPTPGDU24SJUU,upperdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/diff,workdir=/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/work,nodev,metacopy=on,context=\\\"system_u:object_r:container_file_t:s0:c198,c290\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Mounted container \\\"f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\\\" at \\\"/var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\\\"\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created root filesystem for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay/e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856/merged\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Setting Cgroups for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b to machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice:libpod:f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"reading hooks from /usr/share/containers/oci/hooks.d\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Workdir \\\"/var/www\\\" resolved to a volume or mount\"\n 
time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created OCI spec for container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b at /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/config.json\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice for parent machine.slice and name libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"/usr/bin/conmon messages will be logged to syslog\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"running conmon: /usr/bin/conmon\" args=\"[--api-version 1 -c f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -u f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b -r /usr/bin/crun -b /var/lib/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata -p /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/pidfile -n httpd2-httpd2 --exit-dir /run/libpod/exits --persist-dir /run/libpod/persist/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b --full-attach -s -l journald --log-level debug --syslog --conmon-pidfile /run/containers/storage/overlay-containers/f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /var/lib/containers/storage --exit-command-arg --runroot --exit-command-arg /run/containers/storage --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/libpod --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /var/lib/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --storage-opt --exit-command-arg overlay.mountopt=nodev,metacopy=on --exit-command-arg --events-backend --exit-command-arg journald --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b]\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Running conmon under slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice and unitName libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Received: 31431\"\n time=\"2025-07-07T20:12:05-04:00\" level=info msg=\"Got Conmon PID as 31429\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Created container 
f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b in OCI runtime\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Starting container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b with command [/bin/busybox-extras httpd -f -p 80]\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Started container f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Called kube.PersistentPostRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:12:05-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:05 managed-node2 python3.12[31359]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:12:06 managed-node2 python3.12[31587]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:12:06 managed-node2 systemd[1]: Reload requested from client PID 31588 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:06 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:06 managed-node2 systemd-rc-local-generator[31626]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:06 managed-node2 systemd[1]: Reloading finished in 201 ms.\nJul 07 20:12:07 managed-node2 python3.12[31800]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nJul 07 20:12:07 managed-node2 systemd[1]: Reload requested from client PID 31803 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:07 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:07 managed-node2 systemd-rc-local-generator[31851]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:07 managed-node2 systemd[1]: Reloading finished in 212 ms.\nJul 07 20:12:08 managed-node2 python3.12[32015]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:12:08 managed-node2 systemd[1]: Created slice system-podman\\x2dkube.slice - Slice /system/podman-kube.\n\u2591\u2591 Subject: A start job for unit system-podman\\x2dkube.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit system-podman\\x2dkube.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2088.\nJul 07 20:12:08 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2087.\nJul 07 20:12:08 managed-node2 podman[32019]: 2025-07-07 20:12:08.118637911 -0400 EDT m=+0.022713444 pod stop 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, 
name=httpd2)\nJul 07 20:12:15 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: time=\"2025-07-07T20:12:18-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : container 31431 exited with status 137\nJul 07 20:12:18 managed-node2 conmon[31429]: conmon f746539c9d2d9f94203a : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope/container/memory.events\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.143617522 -0400 EDT m=+10.047693144 container died f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 07 20:12:18 
managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Initializing event backend journald\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI 
runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-e8c9dde19cac00bd104a4e3d3dd64a1068105ca06f84455265a9c030fe460856-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.191389905 -0400 EDT m=+10.095465414 container cleanup f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32031]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=32031\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 conmon[31424]: conmon a6323a04a97cb21c1b5f : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice/libpod-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope/container/memory.events\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.211877781 -0400 EDT m=+10.115953439 container died a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPreRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)\"\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:12:18 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Setting custom database backend: \\\"sqlite\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Using sqlite as database backend\"\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Received SIGHUP\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Successfully parsed config\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v4 ip {}\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: Listen v6 ip {}\nJul 07 20:12:18 managed-node2 aardvark-dns[31419]: No configuration found stopping the sever\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph driver overlay\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\nJul 07 20:12:18 managed-node2 
/usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using transient store: false\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Initializing event backend journald\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: 
invalid argument\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\nJul 07 20:12:18 managed-node2 systemd[1]: run-p31412-i31712.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p31412-i31712.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Setting parallel job count to 7\"\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.2554] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:12:18 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2172.\nJul 07 20:12:18 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2172.\nJul 07 20:12:18 managed-node2 systemd[1]: run-netns-netns\\x2d1ce7c5df\\x2d883b\\x2d4dd4\\x2de4c5\\x2d5e8b3ad8b1f6.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d1ce7c5df\\x2d883b\\x2d4dd4\\x2de4c5\\x2d5e8b3ad8b1f6.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.329738431 -0400 EDT m=+10.233814039 container cleanup a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Called cleanup.PersistentPostRunE(/usr/bin/podman --root /var/lib/containers/storage --runroot /run/containers/storage --log-level debug --cgroup-manager systemd --tmpdir /run/libpod --network-config-dir --network-backend netavark --volumepath /var/lib/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --storage-opt 
overlay.mountopt=nodev,metacopy=on --events-backend journald --syslog container cleanup --stopped-only a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307)\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=debug msg=\"Shutting down engines\"\nJul 07 20:12:18 managed-node2 /usr/bin/podman[32043]: time=\"2025-07-07T20:12:18-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=32043\nJul 07 20:12:18 managed-node2 systemd[1]: libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has successfully entered the 'dead' state.\nJul 07 20:12:18 managed-node2 systemd[1]: Stopped libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2252 and the job result is done.\nJul 07 20:12:18 managed-node2 systemd[1]: Removed slice machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice - cgroup machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2251 and the job result is done.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.395138747 -0400 EDT m=+10.299214275 container remove f746539c9d2d9f94203ac5a83fbba387808d1efe819beb16d465b5bacf73333b (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.421655838 -0400 EDT m=+10.325731365 container remove a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307 (image=, name=2dbe050d31da-infra, pod_id=2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b)\nJul 07 20:12:18 managed-node2 systemd[1]: machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b.slice: No such file or directory\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430187452 -0400 EDT m=+10.334262942 pod remove 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: Pods stopped:\nJul 07 20:12:18 
managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\nJul 07 20:12:18 managed-node2 podman[32019]: Pods removed:\nJul 07 20:12:18 managed-node2 podman[32019]: 2dbe050d31da179555f7b45a6c00e30614b0182a573477b97b638cd93d18d11b\nJul 07 20:12:18 managed-node2 podman[32019]: Secrets removed:\nJul 07 20:12:18 managed-node2 podman[32019]: Volumes removed:\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.430564473 -0400 EDT m=+10.334640043 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.451338654 -0400 EDT m=+10.355414196 container create 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 systemd[1]: Created slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2253.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.492350688 -0400 EDT m=+10.396426207 container create 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.498774654 -0400 EDT m=+10.402850152 pod create 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.52421506 -0400 EDT m=+10.428290660 container create 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.524550628 -0400 EDT m=+10.428626163 container restart 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 systemd[1]: Started libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit 
libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2259.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.500507692 -0400 EDT m=+10.404583357 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.584365043 -0400 EDT m=+10.488440589 container init 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.587275237 -0400 EDT m=+10.491350731 container start 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:12:18 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:12:18 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered blocking state\nJul 07 20:12:18 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state\nJul 07 20:12:18 managed-node2 (udev-worker)[32052]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:18 managed-node2 (udev-worker)[32051]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6102] device (podman1): carrier: link connected\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6105] manager: (podman1): new Bridge device (/org/freedesktop/NetworkManager/Devices/5)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6153] device (veth0): carrier: link connected\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6156] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6318] device (podman1): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6330] device (podman1): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6372] device (podman1): Activation: starting connection 'podman1' (d4ed169d-27e3-42b9-8610-eac77be55153)\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6374] device (podman1): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6376] device (podman1): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6377] device (podman1): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6380] device (podman1): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6494] device (podman1): state change: ip-check -> 
secondaries (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6511] device (podman1): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:12:18 managed-node2 NetworkManager[714]: [1751933538.6517] device (podman1): Activation: successful, device activated.\nJul 07 20:12:18 managed-node2 systemd[1]: Started run-p32110-i32410.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run.\n\u2591\u2591 Subject: A start job for unit run-p32110-i32410.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit run-p32110-i32410.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2265.\nJul 07 20:12:18 managed-node2 systemd[1]: Started libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2271.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.743416519 -0400 EDT m=+10.647492104 container init 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.745567921 -0400 EDT m=+10.649643561 container start 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:12:18 managed-node2 systemd[1]: Started libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2278.\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.78342779 -0400 EDT m=+10.687503327 container init 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.786203333 -0400 EDT m=+10.690278905 container start 
0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:18 managed-node2 podman[32019]: 2025-07-07 20:12:18.792190652 -0400 EDT m=+10.696266270 pod start 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:12:18 managed-node2 podman[32019]: Pod:\nJul 07 20:12:18 managed-node2 podman[32019]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:12:18 managed-node2 podman[32019]: Container:\nJul 07 20:12:18 managed-node2 podman[32019]: 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd\nJul 07 20:12:18 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2087.\nJul 07 20:12:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-a6323a04a97cb21c1b5fafab9885d37aa8d8a53c5bb2ba2fe133c8fe21ffc307-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:19 managed-node2 python3.12[32279]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:20 managed-node2 python3.12[32436]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:21 managed-node2 python3.12[32592]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:22 managed-node2 python3.12[32747]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None 
selevel=None setype=None attributes=None\nJul 07 20:12:23 managed-node2 podman[32933]: 2025-07-07 20:12:23.116451069 -0400 EDT m=+0.400655980 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:23 managed-node2 python3.12[33102]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:23 managed-node2 python3.12[33257]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:24 managed-node2 python3.12[33412]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:12:24 managed-node2 python3.12[33537]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/httpd3.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933544.127541-13777-126359990981212/.source.yml _original_basename=.mz8q_k1v follow=False checksum=c8e8f54a2e9107a24008cfb6f1d2d59b89d86a42 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:25 managed-node2 python3.12[33692]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.227950329 -0400 EDT m=+0.015681172 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:25 managed-node2 systemd[1]: Created slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2285.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.26795522 -0400 EDT m=+0.055686154 container create 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 
20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.274184949 -0400 EDT m=+0.061915786 pod create 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.300940456 -0400 EDT m=+0.088671322 container create bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:25 managed-node2 kernel: veth1: entered allmulticast mode\nJul 07 20:12:25 managed-node2 kernel: veth1: entered promiscuous mode\nJul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3256] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/7)\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:25 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.275812121 -0400 EDT m=+0.063543116 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:25 managed-node2 (udev-worker)[33710]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:25 managed-node2 NetworkManager[714]: [1751933545.3293] device (veth1): carrier: link connected\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2291.\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2298.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.421199278 -0400 EDT m=+0.208930271 container init 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.423916963 -0400 EDT m=+0.211647873 container start 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:25 managed-node2 systemd[1]: Started 
libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope.\n\u2591\u2591 Subject: A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2305.\nJul 07 20:12:25 managed-node2 systemd[1]: Started libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2312.\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.47967795 -0400 EDT m=+0.267408864 container init bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.482134977 -0400 EDT m=+0.269865956 container start bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:25 managed-node2 podman[33699]: 2025-07-07 20:12:25.48810857 -0400 EDT m=+0.275839437 pod start 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:26 managed-node2 python3.12[33903]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 33904 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:26 managed-node2 systemd-rc-local-generator[33949]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading finished in 216 ms.\nJul 07 20:12:26 managed-node2 python3.12[34116]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system enabled=True daemon_reload=False daemon_reexec=False no_block=False state=None force=None masked=None\nJul 07 20:12:26 managed-node2 systemd[1]: Reload requested from client PID 34119 ('systemctl') (unit session-5.scope)...\nJul 07 20:12:26 managed-node2 systemd[1]: Reloading...\nJul 07 20:12:27 managed-node2 systemd-rc-local-generator[34169]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:27 managed-node2 systemd[1]: Reloading finished in 222 ms.\nJul 07 20:12:27 managed-node2 python3.12[34331]: ansible-systemd 
Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:12:27 managed-node2 systemd[1]: Starting podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2319.\nJul 07 20:12:27 managed-node2 podman[34335]: 2025-07-07 20:12:27.799634553 -0400 EDT m=+0.024842125 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:28 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: time=\"2025-07-07T20:12:37-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.830033292 -0400 EDT m=+10.055241268 container died bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-404f57844fa9dde2639f08876faa04d4c046f22836d60e203fb44096347d56de-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.871361891 -0400 EDT m=+10.096569436 container cleanup bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test)\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit 
succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.891562873 -0400 EDT m=+10.116770720 container died 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra)\nJul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:12:37 managed-node2 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:12:37 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:37 managed-node2 systemd[1]: run-netns-netns\\x2d9f683135\\x2dcb1d\\x2d6825\\x2d135b\\x2df344c71f6412.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d9f683135\\x2dcb1d\\x2d6825\\x2d135b\\x2df344c71f6412.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.960143035 -0400 EDT m=+10.185350606 container cleanup 5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:37 managed-node2 systemd[1]: Stopping libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope...\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit 
libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2405.\nJul 07 20:12:37 managed-node2 systemd[1]: libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has successfully entered the 'dead' state.\nJul 07 20:12:37 managed-node2 systemd[1]: Stopped libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope.\n\u2591\u2591 Subject: A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit libpod-conmon-5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6.scope has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2405 and the job result is done.\nJul 07 20:12:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice - cgroup machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2404 and the job result is done.\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.968733882 -0400 EDT m=+10.193941424 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.973070119 -0400 EDT m=+10.198277748 pod stop 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:37 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:37 managed-node2 podman[34335]: 2025-07-07 20:12:37.999456374 -0400 EDT m=+10.224663949 container remove bab0ae8ebcb66c08f684eea98f187e3a33f7804bc4793f319f3e33d109a58463 (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.025619131 -0400 EDT m=+10.250826702 container remove 
5204fa3ee7b7d231f261ed715e4af5a2ff156342851d65e1c4d5bf7ba3275df6 (image=, name=3340ce26176a-infra, pod_id=3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94)\nJul 07 20:12:38 managed-node2 systemd[1]: machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: Failed to open /run/systemd/transient/machine-libpod_pod_3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94.slice: No such file or directory\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034330595 -0400 EDT m=+10.259538132 pod remove 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94 (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: Pods stopped:\nJul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94\nJul 07 20:12:38 managed-node2 podman[34335]: Pods removed:\nJul 07 20:12:38 managed-node2 podman[34335]: 3340ce26176a73729930bd2117f60723a0073f6c0a68b141a34c323546464d94\nJul 07 20:12:38 managed-node2 podman[34335]: Secrets removed:\nJul 07 20:12:38 managed-node2 podman[34335]: Volumes removed:\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.034440895 -0400 EDT m=+10.259648519 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.055160997 -0400 EDT m=+10.280368656 container create f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Created slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2406.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.089857565 -0400 EDT m=+10.315065108 container create afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.096374067 -0400 EDT m=+10.321581608 pod create f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.121708454 -0400 EDT m=+10.346916101 container create 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 
managed-node2 podman[34335]: 2025-07-07 20:12:38.122052483 -0400 EDT m=+10.347260058 container restart f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2412.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.09796996 -0400 EDT m=+10.323177689 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.161037652 -0400 EDT m=+10.386245273 container init f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.163287704 -0400 EDT m=+10.388495346 container start f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:12:38 managed-node2 kernel: veth1: entered allmulticast mode\nJul 07 20:12:38 managed-node2 kernel: veth1: entered promiscuous mode\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered blocking state\nJul 07 20:12:38 managed-node2 kernel: podman1: port 2(veth1) entered forwarding state\nJul 07 20:12:38 managed-node2 (udev-worker)[34367]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1845] manager: (veth1): new Veth device (/org/freedesktop/NetworkManager/Devices/8)\nJul 07 20:12:38 managed-node2 NetworkManager[714]: [1751933558.1884] device (veth1): carrier: link connected\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2418.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.256569895 -0400 EDT m=+10.481777617 container init afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.259020833 -0400 EDT m=+10.484228554 container start afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:12:38 managed-node2 systemd[1]: Started libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2425.\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.305507767 -0400 EDT m=+10.530715363 container init 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.307690208 -0400 EDT m=+10.532897848 container start 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:38 managed-node2 podman[34335]: 2025-07-07 20:12:38.313601662 -0400 EDT m=+10.538809232 pod start f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:12:38 managed-node2 podman[34335]: Pod:\nJul 07 20:12:38 managed-node2 podman[34335]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:12:38 managed-node2 podman[34335]: Container:\nJul 07 20:12:38 managed-node2 podman[34335]: 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e\nJul 07 20:12:38 managed-node2 systemd[1]: Started podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2319.\nJul 07 20:12:39 managed-node2 sudo[34620]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-pxlwmudvcyywtlojiblkxiobcxywcxdd ; /usr/bin/python3.12 
/var/tmp/ansible-tmp-1751933558.7922473-14198-135957250003670/AnsiballZ_command.py'\nJul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:12:39 managed-node2 python3.12[34623]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:39 managed-node2 systemd[27434]: Started podman-34631.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 112.\nJul 07 20:12:39 managed-node2 sudo[34620]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:12:39 managed-node2 python3.12[34794]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 python3.12[34957]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 sudo[35170]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-edeafswnzssmkzeqkidxujvilmmhsinz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933560.1733143-14258-164223741543922/AnsiballZ_command.py'\nJul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:12:40 managed-node2 python3.12[35173]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:40 managed-node2 sudo[35170]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:12:40 managed-node2 python3.12[35331]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:41 managed-node2 python3.12[35489]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:42 managed-node2 python3.12[35647]: ansible-ansible.legacy.uri Invoked with 
url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:42 managed-node2 python3.12[35804]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:12:43 managed-node2 python3.12[35960]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd1-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:43 managed-node2 python3.12[36116]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd2-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:43 managed-node2 python3.12[36272]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /tmp/lsr_m03kfbaj_podman/httpd3-create _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:46 managed-node2 python3.12[36583]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:47 managed-node2 python3.12[36744]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:50 managed-node2 python3.12[36901]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:51 managed-node2 python3.12[37057]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False 
daemon_reexec=False scope=system no_block=False state=None enabled=None force=None\nJul 07 20:12:51 managed-node2 python3.12[37214]: ansible-ansible.legacy.systemd Invoked with name=firewalld enabled=True state=started daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None\nJul 07 20:12:52 managed-node2 python3.12[37371]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True online=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] includes=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None\nJul 07 20:12:54 managed-node2 python3.12[37526]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:55 managed-node2 python3.12[37683]: ansible-ansible.legacy.dnf Invoked with name=['grubby'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:55 managed-node2 python3.12[37839]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None\nJul 07 20:12:56 managed-node2 python3.12[37996]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d\nJul 07 20:12:58 managed-node2 python3.12[38193]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True\nJul 07 20:12:59 managed-node2 python3.12[38348]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked\nJul 07 20:13:03 managed-node2 python3.12[38503]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:13:03 managed-node2 python3.12[38659]: ansible-stat 
Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:04 managed-node2 python3.12[38817]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:04 managed-node2 python3.12[38973]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:05 managed-node2 python3.12[39129]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:06 managed-node2 python3.12[39285]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:13:07 managed-node2 python3.12[39440]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:07 managed-node2 python3.12[39595]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:07 managed-node2 sudo[39800]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-sggfuwotsbnqelqoxqdpnxcztrgpfxov ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933587.546744-15228-221616196309386/AnsiballZ_podman_image.py'\nJul 07 20:13:07 managed-node2 sudo[39800]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:07 managed-node2 systemd[27434]: Started podman-39804.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 116.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39811.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 
\n\u2591\u2591 The job identifier is 120.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39818.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 124.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39825.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 128.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39832.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 132.\nJul 07 20:13:08 managed-node2 systemd[27434]: Started podman-39839.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 136.\nJul 07 20:13:08 managed-node2 sudo[39800]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:08 managed-node2 python3.12[40000]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:09 managed-node2 python3.12[40157]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:09 managed-node2 python3.12[40312]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:10 managed-node2 python3.12[40390]: ansible-ansible.legacy.file Invoked with owner=podman_basic_user group=3001 mode=0644 dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _original_basename=.g46gggh2 recurse=False state=file path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:10 managed-node2 sudo[40595]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lwtednmiooezcolvhoaprqmyaqljtcnz ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 
/var/tmp/ansible-tmp-1751933590.280253-15331-51302432010459/AnsiballZ_podman_play.py'\nJul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:10 managed-node2 systemd[27434]: Started podman-40605.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 140.\nJul 07 20:13:10 managed-node2 systemd[27434]: Created slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 144.\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"systemd-logind: Unknown object '/'.\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using graph root /home/podman_basic_user/.local/share/containers/storage\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using run root /run/user/3001/containers\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using tmp dir /run/user/3001/libpod/tmp\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug 
msg=\"Using transient store: false\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that metacopy is not being used\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Cached value indicated that native-diff is usable\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Initializing event backend file\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 8ffa2dc6ff76aec6ab19c0699b0508615d79e3b6d14dd7cd78be0f62c7718f3e bridge podman1 2025-07-07 20:11:45.408967969 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Created cgroup path user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice for parent user.slice and name libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Created cgroup 
user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice\"\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice\"\n Error: adding pod to state: name \"httpd1\" is in use: pod already exists\n time=\"2025-07-07T20:13:10-04:00\" level=debug msg=\"Shutting down engines\"\n time=\"2025-07-07T20:13:10-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=40605\nJul 07 20:13:10 managed-node2 python3.12[40598]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 07 20:13:10 managed-node2 sudo[40595]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:11 managed-node2 python3.12[40767]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:12 managed-node2 python3.12[40923]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:13 managed-node2 python3.12[41080]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:14 managed-node2 python3.12[41236]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:14 managed-node2 python3.12[41391]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd2-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:15 managed-node2 podman[41576]: 2025-07-07 20:13:15.556653463 -0400 EDT m=+0.319976011 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:13:15 managed-node2 python3.12[41746]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:16 managed-node2 python3.12[41903]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:16 managed-node2 python3.12[42058]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:17 managed-node2 
python3.12[42136]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 dest=/etc/containers/ansible-kubernetes.d/httpd2.yml _original_basename=.cnp2b107 recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd2.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:17 managed-node2 podman[42298]: 2025-07-07 20:13:17.610100559 -0400 EDT m=+0.016705745 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:13:17 managed-node2 systemd[1]: Created slice machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice - cgroup machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2432.\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: \nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"/usr/bin/podman filtering at log level debug\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Called kube.PersistentPreRunE(/usr/bin/podman play kube --start=true --log-level=debug /etc/containers/ansible-kubernetes.d/httpd2.yml)\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using conmon: \\\"/usr/bin/conmon\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Using sqlite as database backend\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using graph driver overlay\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using graph root /var/lib/containers/storage\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using run root /run/containers/storage\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using static dir /var/lib/containers/storage/libpod\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using tmp dir /run/libpod\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using volume path /var/lib/containers/storage/volumes\"\n 
time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using transient store: false\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"[graphdriver] trying provided driver \\\"overlay\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that overlay is supported\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that metacopy is being used\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Cached value indicated that native-diff is not being used\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Not using native diff for overlay, this may cause degraded performance for building images: kernel has CONFIG_OVERLAY_FS_REDIRECT_DIR enabled\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"backingFs=xfs, projectQuotaSupported=false, useNativeDiff=false, usingMetacopy=true\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Initializing event backend journald\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Using OCI runtime \\\"/usr/bin/crun\\\"\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Setting parallel job count to 7\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Successfully loaded network podman-default-kube-network: &{podman-default-kube-network 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba bridge podman1 2025-07-07 20:10:03.41385383 -0400 EDT [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Successfully loaded 2 networks\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Pod using bridge network mode\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Created cgroup path machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice for parent 
machine.slice and name libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Created cgroup machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice\"\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Got pod cgroup as machine.slice/machine-libpod_pod_2ea33668b0af793273e3623e62c3ffdd76c020f8fc5e4ca24a2cf88871bd78c9.slice\"\n Error: adding pod to state: name \"httpd2\" is in use: pod already exists\n time=\"2025-07-07T20:13:17-04:00\" level=debug msg=\"Shutting down engines\"\n time=\"2025-07-07T20:13:17-04:00\" level=info msg=\"Received shutdown.Stop(), terminating!\" PID=42298\nJul 07 20:13:17 managed-node2 python3.12[42291]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125\nJul 07 20:13:18 managed-node2 python3.12[42459]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:20 managed-node2 python3.12[42616]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:20 managed-node2 python3.12[42772]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:21 managed-node2 python3.12[42927]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:22 managed-node2 podman[43112]: 2025-07-07 20:13:22.329599025 -0400 EDT m=+0.443555601 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:13:22 managed-node2 python3.12[43281]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:23 managed-node2 python3.12[43438]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:23 managed-node2 python3.12[43593]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:24 managed-node2 python3.12[43671]: ansible-ansible.legacy.file Invoked with owner=root group=0 mode=0644 
dest=/etc/containers/ansible-kubernetes.d/httpd3.yml _original_basename=.garaxq8q recurse=False state=file path=/etc/containers/ansible-kubernetes.d/httpd3.yml force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:24 managed-node2 python3.12[43826]: ansible-containers.podman.podman_play Invoked with state=started kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:24 managed-node2 podman[43834]: 2025-07-07 20:13:24.584154544 -0400 EDT m=+0.014632770 network create 51bfc940b19ad4beb17f3742cd6e288573909333b80c0d3502c69042d548f5ba (name=podman-default-kube-network, type=bridge)\nJul 07 20:13:24 managed-node2 systemd[1]: Created slice machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice - cgroup machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_67cb0317b3ba107c878830829600db1465fe0ef7f2ddfd39ed24f0099866fcc0.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2438.\nJul 07 20:13:25 managed-node2 sudo[44045]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-fkixybyzrrzuqcjcfmgozxwfwmajookd ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933605.3310475-16168-26421251595513/AnsiballZ_command.py'\nJul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:25 managed-node2 python3.12[44048]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd1 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:25 managed-node2 systemd[27434]: Started podman-44056.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 148.\nJul 07 20:13:25 managed-node2 sudo[44045]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:26 managed-node2 python3.12[44220]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect httpd2 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node2 python3.12[44383]: ansible-ansible.legacy.command 
Invoked with _raw_params=podman pod inspect httpd3 --format '{{.State}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node2 sudo[44596]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ydnvtlhbpgwizyiplpnqjwanppjzhlbg ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933606.7243044-16231-16521388663911/AnsiballZ_command.py'\nJul 07 20:13:26 managed-node2 sudo[44596]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:27 managed-node2 python3.12[44599]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:27 managed-node2 sudo[44596]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:27 managed-node2 python3.12[44757]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:27 managed-node2 python3.12[44915]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:28 managed-node2 python3.12[45073]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15001/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:28 managed-node2 python3.12[45229]: ansible-ansible.legacy.uri Invoked with url=http://localhost:15002/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:29 managed-node2 python3.12[45385]: ansible-ansible.legacy.uri Invoked with 
url=http://localhost:15003/index.txt return_content=True force=False http_agent=ansible-httpget use_proxy=True validate_certs=True force_basic_auth=False use_gssapi=False body_format=raw method=GET follow_redirects=safe status_code=[200] timeout=30 headers={} remote_src=False unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None dest=None body=None src=None creates=None removes=None unix_socket=None ca_path=None ciphers=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:31 managed-node2 python3.12[45696]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:32 managed-node2 python3.12[45857]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:36 managed-node2 python3.12[46014]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:13:36 managed-node2 python3.12[46170]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:37 managed-node2 python3.12[46327]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:37 managed-node2 python3.12[46483]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:38 managed-node2 python3.12[46639]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:39 managed-node2 python3.12[46795]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node2 sudo[47002]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-cdbeefpvsgoajxqmxzaiihkscmhhxllh ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933619.5337389-16868-45510891100291/AnsiballZ_systemd.py'\nJul 07 20:13:39 managed-node2 sudo[47002]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:40 managed-node2 python3.12[47005]: ansible-systemd Invoked with name=podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service scope=user state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:40 managed-node2 systemd[27434]: Reload requested from client PID 47008 ('systemctl')...\nJul 07 20:13:40 managed-node2 systemd[27434]: Reloading...\nJul 07 20:13:40 
managed-node2 systemd[27434]: Reloading finished in 62 ms.\nJul 07 20:13:40 managed-node2 systemd[27434]: Stopping podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 152.\nJul 07 20:13:50 managed-node2 podman[47019]: time=\"2025-07-07T20:13:50-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd1-httpd1 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:13:50 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:13:50 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:13:50 managed-node2 systemd[27434]: Removed slice user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice - cgroup user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 153 and the job result is done.\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 systemd[27434]: user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: Failed to open /run/user/3001/systemd/transient/user-libpod_pod_96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6.slice: No such file or directory\nJul 07 20:13:50 managed-node2 podman[47019]: Pods stopped:\nJul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:13:50 managed-node2 podman[47019]: Pods removed:\nJul 07 20:13:50 managed-node2 podman[47019]: 96374ca7edf38cefe68178fa42a2c934aac06df7968e1192c86373839d58bbf6\nJul 07 20:13:50 managed-node2 podman[47019]: Secrets removed:\nJul 07 20:13:50 managed-node2 podman[47019]: Volumes removed:\nJul 07 20:13:50 managed-node2 systemd[27434]: Stopped podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 152 and the job result is done.\nJul 07 20:13:50 
managed-node2 systemd[27434]: podman-kube@-home-podman_basic_user-.config-containers-ansible\\x2dkubernetes.d-httpd1.yml.service: Consumed 589ms CPU time, 74.3M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nJul 07 20:13:50 managed-node2 sudo[47002]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:50 managed-node2 python3.12[47222]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:51 managed-node2 sudo[47429]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ragjadshxwcsmhyabehcrpjvrodecwop ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933631.1425395-17190-192702739809308/AnsiballZ_podman_play.py'\nJul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play version: 5.5.1, kube file /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:51 managed-node2 systemd[27434]: Started podman-47439.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 154.\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman kube play --down /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 07 20:13:51 managed-node2 python3.12[47432]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:13:51 managed-node2 sudo[47429]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:13:52 managed-node2 python3.12[47600]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None 
serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node2 python3.12[47755]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:53 managed-node2 python3.12[47911]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:54 managed-node2 python3.12[48068]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:55 managed-node2 python3.12[48224]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:55 managed-node2 systemd[1]: Reload requested from client PID 48227 ('systemctl') (unit session-5.scope)...\nJul 07 20:13:55 managed-node2 systemd[1]: Reloading...\nJul 07 20:13:55 managed-node2 systemd-rc-local-generator[48270]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:13:55 managed-node2 systemd[1]: Reloading finished in 218 ms.\nJul 07 20:13:55 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2445.\nJul 07 20:13:56 managed-node2 podman[48286]: 2025-07-07 20:13:56.040685373 -0400 EDT m=+0.023115658 pod stop 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:14:06 managed-node2 podman[48286]: time=\"2025-07-07T20:14:06-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd2-httpd2 in 10 seconds, resorting to SIGKILL\"\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.074195184 -0400 EDT m=+10.056625730 container died 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 
Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-9fa181bdd3e6904e4a3e75e30d4505da5f0cd638ecd007c086bea8376c79fc52-merged.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.11235352 -0400 EDT m=+10.094783772 container cleanup 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.131606724 -0400 EDT m=+10.114044140 container died 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:14:06 managed-node2 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:14:06 managed-node2 kernel: podman1: port 1(veth0) entered disabled state\nJul 07 20:14:06 managed-node2 systemd[1]: run-netns-netns\\x2d82249bc3\\x2db382\\x2d7b9a\\x2d81bc\\x2d86e8308d188a.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d82249bc3\\x2db382\\x2d7b9a\\x2d81bc\\x2d86e8308d188a.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.202757575 -0400 EDT m=+10.185187931 
container cleanup 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: Removed slice machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice - cgroup machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2446 and the job result is done.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.23022593 -0400 EDT m=+10.212656217 container remove 0e14db776822f1e8ce753c4fc97ccc314e8d1788edb850fe194ded4bbc9061cd (image=quay.io/libpod/testimage:20210610, name=httpd2-httpd2, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.25693135 -0400 EDT m=+10.239361641 container remove 09bdab6f89b6e10d9c4b04c02b856b6d0b94744c3a6df47a43c0617274343353 (image=, name=4c70aae5ec24-infra, pod_id=4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: Failed to open /run/systemd/transient/machine-libpod_pod_4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98.slice: No such file or directory\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.265332892 -0400 EDT m=+10.247763153 pod remove 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98 (image=, name=httpd2)\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.268022285 -0400 EDT m=+10.250452816 container kill 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176.scope has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.274842916 -0400 EDT m=+10.257273277 container died 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 systemd[1]: 
var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 podman[48286]: 2025-07-07 20:14:06.336039419 -0400 EDT m=+10.318469707 container remove 6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176 (image=, name=671aefaa976c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service)\nJul 07 20:14:06 managed-node2 podman[48286]: Pods stopped:\nJul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:14:06 managed-node2 podman[48286]: Pods removed:\nJul 07 20:14:06 managed-node2 podman[48286]: 4c70aae5ec2463406ba08c910c64656a60b43e09faf68974182aef83fb430c98\nJul 07 20:14:06 managed-node2 podman[48286]: Secrets removed:\nJul 07 20:14:06 managed-node2 podman[48286]: Volumes removed:\nJul 07 20:14:06 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has successfully entered the 'dead' state.\nJul 07 20:14:06 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2445 and the job result is done.\nJul 07 20:14:06 managed-node2 python3.12[48488]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-6544d580eaccfc8f966ca1e4b0a4065e41ecdeb8a249fa3f351cec356d4c4176-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play Invoked with state=absent debug=True log_level=debug kube_file=/etc/containers/ansible-kubernetes.d/httpd2.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None 
quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /usr/bin/podman kube play --down /etc/containers/ansible-kubernetes.d/httpd2.yml\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: Pods stopped:\n Pods removed:\n Secrets removed:\n Volumes removed:\nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: \nJul 07 20:14:07 managed-node2 python3.12[48645]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 0\nJul 07 20:14:07 managed-node2 python3.12[48813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node2 python3.12[48969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node2 python3.12[49126]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:10 managed-node2 python3.12[49282]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:14:10 managed-node2 systemd[1]: Reload requested from client PID 49285 ('systemctl') (unit session-5.scope)...\nJul 07 20:14:10 managed-node2 systemd[1]: Reloading...\nJul 07 20:14:11 managed-node2 systemd-rc-local-generator[49327]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:14:11 managed-node2 systemd[1]: Reloading finished in 211 ms.\nJul 07 20:14:11 managed-node2 systemd[1]: Stopping podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2447.\nJul 07 20:14:11 managed-node2 podman[49344]: 2025-07-07 20:14:11.206854573 -0400 EDT m=+0.022831781 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 podman[49344]: time=\"2025-07-07T20:14:21-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container httpd3-httpd3 in 10 seconds, resorting to SIGKILL\"\nJul 07 
20:14:21 managed-node2 systemd[1]: libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.240228718 -0400 EDT m=+10.056206048 container died 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test)\nJul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-350f35f9a3dec1a954b9c8301592ec6c7cff326df9aa3350ca38ff2248bf45f0-merged.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.280265572 -0400 EDT m=+10.096242780 container cleanup 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:14:21 managed-node2 systemd[1]: libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.298799022 -0400 EDT m=+10.114776654 container died afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: run-p32110-i32410.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-p32110-i32410.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:14:21 managed-node2 kernel: veth1 
(unregistering): left allmulticast mode\nJul 07 20:14:21 managed-node2 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:14:21 managed-node2 kernel: podman1: port 2(veth1) entered disabled state\nJul 07 20:14:21 managed-node2 NetworkManager[714]: [1751933661.3375] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:14:21 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2448.\nJul 07 20:14:21 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2448.\nJul 07 20:14:21 managed-node2 systemd[1]: run-netns-netns\\x2dd8d23001\\x2dccdd\\x2d98d5\\x2d0185\\x2d01ce80e8c916.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2dd8d23001\\x2dccdd\\x2d98d5\\x2d0185\\x2d01ce80e8c916.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.414963852 -0400 EDT m=+10.230941061 container cleanup afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: Removed slice machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice - cgroup machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2527 and the job result is done.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.422214439 -0400 EDT m=+10.238191678 pod 
stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.428641853 -0400 EDT m=+10.244619177 pod stop f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.436145908 -0400 EDT m=+10.252123291 container kill f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0.scope has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.451967497 -0400 EDT m=+10.267945068 container died f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.478053211 -0400 EDT m=+10.294030464 container remove 3a17f3f8fba6244c142fc9f43765997334bee1dc00d1b83754a1564cc37d943e (image=quay.io/libpod/testimage:20210610, name=httpd3-httpd3, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.504395032 -0400 EDT m=+10.320372288 container remove afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e (image=, name=f714ebed6201-infra, pod_id=f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 systemd[1]: machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: Failed to open /run/systemd/transient/machine-libpod_pod_f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f.slice: No such file or directory\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.513441882 -0400 EDT m=+10.329419091 pod remove f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f (image=, name=httpd3)\nJul 07 20:14:21 managed-node2 podman[49397]: 2025-07-07 20:14:21.533507126 -0400 EDT m=+0.087614608 container cleanup f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, 
name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: 2025-07-07 20:14:21.560230359 -0400 EDT m=+10.376207602 container remove f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0 (image=, name=c79a1b99881c-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service)\nJul 07 20:14:21 managed-node2 podman[49344]: Pods stopped:\nJul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:14:21 managed-node2 podman[49344]: Pods removed:\nJul 07 20:14:21 managed-node2 podman[49344]: f714ebed62017a739384907cd0b3020b23abbb7a0b18f40bf4b1a09007a6856f\nJul 07 20:14:21 managed-node2 podman[49344]: Secrets removed:\nJul 07 20:14:21 managed-node2 podman[49344]: Volumes removed:\nJul 07 20:14:21 managed-node2 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has successfully entered the 'dead' state.\nJul 07 20:14:21 managed-node2 systemd[1]: Stopped podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service - A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2447 and the job result is done.\nJul 07 20:14:21 managed-node2 python3.12[49562]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-afa3d5633c2b58e365fe304679c1ed0719a5f1cc5fcacdf05d91f2d07cb4a07e-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-f825addb604f6eaf80d0e9d594e3ecf0f1c615599887138ca3ee22f57666fed0-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/httpd3.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:14:22 managed-node2 python3.12[49720]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/httpd3.yml\nJul 07 20:14:22 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:22 managed-node2 python3.12[49888]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node2 python3.12[50043]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 07 20:14:24 managed-node2 python3.12[50199]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:24 managed-node2 sudo[50407]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-jpalnwprgspnzsnsvsnylrerubrespev ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.3660543-18219-69893342173436/AnsiballZ_podman_container_info.py'\nJul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:24 managed-node2 python3.12[50410]: ansible-containers.podman.podman_container_info Invoked with executable=podman name=None\nJul 07 20:14:24 managed-node2 systemd[27434]: Started podman-50411.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 
\n\u2591\u2591 The job identifier is 158.\nJul 07 20:14:24 managed-node2 sudo[50407]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:25 managed-node2 sudo[50623]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-lygojmlyopjnwoxfpskxczdponstloyj ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933664.983715-18245-114379203266243/AnsiballZ_command.py'\nJul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:25 managed-node2 python3.12[50626]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50627.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 162.\nJul 07 20:14:25 managed-node2 sudo[50623]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:25 managed-node2 sudo[50839]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-iwlmlwzhyduyhuxfusqoosixqzgzqvsl ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933665.559254-18262-213663586865707/AnsiballZ_command.py'\nJul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:25 managed-node2 python3.12[50842]: ansible-ansible.legacy.command Invoked with _raw_params=podman secret ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:25 managed-node2 systemd[27434]: Started podman-50843.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 166.\nJul 07 20:14:25 managed-node2 sudo[50839]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:26 managed-node2 python3.12[51004]: ansible-ansible.legacy.command Invoked with removes=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl disable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None stdin=None\nJul 07 20:14:26 managed-node2 systemd[1]: Stopping user@3001.service - User Manager for UID 3001...\n\u2591\u2591 Subject: A stop job for unit user@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2529.\nJul 07 20:14:26 managed-node2 systemd[27434]: Activating special unit exit.target...\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopping podman-pause-8c2d7b35.scope...\n\u2591\u2591 
Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 177.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app-podman\\x2dkube.slice - Slice /app/podman-kube.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 181 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: app-podman\\x2dkube.slice: Consumed 589ms CPU time, 74.4M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice - cgroup user-libpod_pod_25df5f562d1af8490514e3fda02fb55afc382f739a070dbbdd6f399bc06ae96d.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 178 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target default.target - Main User Target.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 191 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target basic.target - Basic System.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 190 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target paths.target - Paths.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 184 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target sockets.target - Sockets.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 187 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped target timers.target - Timers.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 189 and the job result is 
done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 186 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 185 and the job result is done.\nJul 07 20:14:26 managed-node2 dbus-broker[27991]: Dispatched 2275 messages @ 3(\u00b113)\u03bcs / message.\n\u2591\u2591 Subject: Dispatched 2275 messages\n\u2591\u2591 Defined-By: dbus-broker\n\u2591\u2591 Support: https://groups.google.com/forum/#!forum/bus1-devel\n\u2591\u2591 \n\u2591\u2591 This message is printed by dbus-broker when shutting down. It includes metric\n\u2591\u2591 information collected during the runtime of dbus-broker.\n\u2591\u2591 \n\u2591\u2591 The message lists the number of dispatched messages\n\u2591\u2591 (in this case 2275) as well as the mean time to\n\u2591\u2591 handling a single message. The time measurements exclude the time spent on\n\u2591\u2591 writing to and reading from the kernel.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopping dbus-broker.service - D-Bus User Message Bus...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 174.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped systemd-tmpfiles-setup.service - Create User Files and Directories.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 179 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped podman-pause-8c2d7b35.scope.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 177 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice user.slice - Slice /user.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 176 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Stopped dbus-broker.service - D-Bus User Message Bus.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has 
finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 174 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice session.slice - User Core Session Slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 175 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Closed dbus.socket - D-Bus User Message Bus Socket.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 180 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: Removed slice app.slice - User Application Slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 182 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[27434]: app.slice: Consumed 615ms CPU time, 74.5M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 systemd[27434]: Reached target shutdown.target - Shutdown.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 173.\nJul 07 20:14:26 managed-node2 systemd[27434]: Finished systemd-exit.service - Exit the Session.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 171.\nJul 07 20:14:26 managed-node2 systemd[27434]: Reached target exit.target - Exit the Session.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 170.\nJul 07 20:14:26 managed-node2 systemd-logind[659]: Removed session 6.\n\u2591\u2591 Subject: Session 6 has been terminated\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: sd-login(3)\n\u2591\u2591 \n\u2591\u2591 A session with the ID 6 has been terminated.\nJul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user@3001.service has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopped user@3001.service - 
User Manager for UID 3001.\n\u2591\u2591 Subject: A stop job for unit user@3001.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user@3001.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2529 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: user@3001.service: Consumed 2.005s CPU time, 92.7M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user@3001.service completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopping user-runtime-dir@3001.service - User Runtime Directory /run/user/3001...\n\u2591\u2591 Subject: A stop job for unit user-runtime-dir@3001.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user-runtime-dir@3001.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2528.\nJul 07 20:14:26 managed-node2 systemd[1]: run-user-3001.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-user-3001.mount has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: user-runtime-dir@3001.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user-runtime-dir@3001.service has successfully entered the 'dead' state.\nJul 07 20:14:26 managed-node2 systemd[1]: Stopped user-runtime-dir@3001.service - User Runtime Directory /run/user/3001.\n\u2591\u2591 Subject: A stop job for unit user-runtime-dir@3001.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user-runtime-dir@3001.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2528 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: Removed slice user-3001.slice - User Slice of UID 3001.\n\u2591\u2591 Subject: A stop job for unit user-3001.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit user-3001.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2530 and the job result is done.\nJul 07 20:14:26 managed-node2 systemd[1]: user-3001.slice: Consumed 2.031s CPU time, 92.8M memory peak.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit user-3001.slice completed and consumed the indicated resources.\nJul 07 20:14:26 managed-node2 python3.12[51165]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:29 managed-node2 python3.12[51321]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user 
--value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:31 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:14:31 managed-node2 python3.12[51478]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:33 managed-node2 python3.12[51634]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:36 managed-node2 python3.12[51790]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:38 managed-node2 python3.12[51946]: ansible-ansible.legacy.command Invoked with _raw_params=loginctl show-user --value -p State podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:39 managed-node2 sudo[52152]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tfrixhsqpnpoemptjbnyouggwynzakes ; /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933679.2995527-18658-251091507513885/AnsiballZ_command.py'\nJul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:39 managed-node2 python3.12[52155]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:39 managed-node2 sudo[52152]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:40 managed-node2 python3.12[52317]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd2 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:40 managed-node2 python3.12[52479]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod exists httpd3 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None 
executable=None creates=None removes=None stdin=None\nJul 07 20:14:40 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:14:40 managed-node2 sudo[52692]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-tblibtrtarfdedjsuypqezvumnboznzr ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933680.662618-18720-8327876079082/AnsiballZ_command.py'\nJul 07 20:14:40 managed-node2 sudo[52692]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0)\nJul 07 20:14:40 managed-node2 python3.12[52695]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --user list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd1[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:41 managed-node2 sudo[52692]: pam_unix(sudo:session): session closed for user podman_basic_user\nJul 07 20:14:41 managed-node2 python3.12[52853]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd2[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:41 managed-node2 python3.12[53011]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail\n systemctl --system list-units -a -l --plain | grep -E '^[ ]*podman-kube@.+-httpd3[.]yml[.]service[ ]+loaded[ ]+active '\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:42 managed-node2 python3.12[53169]: ansible-stat Invoked with path=/var/lib/systemd/linger/podman_basic_user follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:44 managed-node2 python3.12[53479]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:45 managed-node2 python3.12[53640]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:45 managed-node2 python3.12[53796]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:48 managed-node2 python3.12[53953]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None\nJul 07 20:14:48 managed-node2 python3.12[54109]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:49 managed-node2 python3.12[54266]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True 
stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:49 managed-node2 python3.12[54422]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:50 managed-node2 python3.12[54578]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:51 managed-node2 python3.12[54734]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:51 managed-node2 python3.12[54889]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:52 managed-node2 python3.12[55044]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:53 managed-node2 python3.12[55199]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:53 managed-node2 python3.12[55355]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:55 managed-node2 python3.12[55512]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd2.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:56 managed-node2 python3.12[55668]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd2.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:14:56 managed-node2 python3.12[55825]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:56 managed-node2 python3.12[55980]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd2.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:58 managed-node2 python3.12[56135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:14:59 managed-node2 python3.12[56292]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/httpd3.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:00 managed-node2 python3.12[56448]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-httpd3.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:15:00 managed-node2 python3.12[56605]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:01 managed-node2 python3.12[56760]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/httpd3.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:02 managed-node2 python3.12[56915]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=True service=None split=None\nJul 07 20:15:02 managed-node2 python3.12[57071]: ansible-stat Invoked with path=/run/user/3001 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:03 managed-node2 python3.12[57226]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:04 managed-node2 python3.12[57381]: ansible-file Invoked with path=/tmp/lsr_m03kfbaj_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:06 managed-node2 python3.12[57587]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:07 managed-node2 python3.12[57771]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:07 managed-node2 python3.12[57926]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:09 managed-node2 python3.12[58236]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:10 managed-node2 python3.12[58398]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 
20:15:10 managed-node2 python3.12[58554]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:12 managed-node2 python3.12[58711]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:13 managed-node2 python3.12[58868]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:14 managed-node2 python3.12[59023]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:14 managed-node2 python3.12[59148]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933714.0353012-19840-138139497644559/.source.pod dest=/etc/containers/systemd/quadlet-pod-pod.pod owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:15 managed-node2 python3.12[59303]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:15 managed-node2 systemd[1]: Reload requested from client PID 59304 ('systemctl') (unit session-5.scope)...\nJul 07 20:15:15 managed-node2 systemd[1]: Reloading...\nJul 07 20:15:15 managed-node2 systemd-rc-local-generator[59347]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:15:15 managed-node2 systemd[1]: Reloading finished in 191 ms.\nJul 07 20:15:16 managed-node2 python3.12[59512]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:16 managed-node2 systemd[1]: Starting quadlet-pod-pod-pod.service...\n\u2591\u2591 Subject: A start job for unit quadlet-pod-pod-pod.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-pod-pod.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2532.\nJul 07 20:15:16 managed-node2 systemd[1]: Created slice machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice - cgroup machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice.\n\u2591\u2591 Subject: A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit machine-libpod_pod_fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 
2616.\nJul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.265732025 -0400 EDT m=+0.061820196 container create 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59516]: 2025-07-07 20:15:16.272319983 -0400 EDT m=+0.068408122 pod create fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod)\nJul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59516]: fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519\nJul 07 20:15:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3361] manager: (podman0): new Bridge device (/org/freedesktop/NetworkManager/Devices/9)\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:15:16 managed-node2 kernel: veth0: entered allmulticast mode\nJul 07 20:15:16 managed-node2 kernel: veth0: entered promiscuous mode\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3447] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/10)\nJul 07 20:15:16 managed-node2 (udev-worker)[59536]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:15:16 managed-node2 (udev-worker)[59537]: Network interface NamePolicy= disabled on kernel command line.\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered blocking state\nJul 07 20:15:16 managed-node2 kernel: podman0: port 1(veth0) entered forwarding state\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3546] device (veth0): carrier: link connected\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3548] device (podman0): carrier: link connected\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3683] device (podman0): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3701] device (podman0): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3707] device (podman0): Activation: starting connection 'podman0' (845713e4-a167-41fd-b540-293fb4a7aacd)\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3708] device (podman0): state change: disconnected -> prepare (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3710] device (podman0): state change: prepare -> config (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3712] device (podman0): state change: config -> ip-config (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.3714] device (podman0): state change: ip-config -> ip-check (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 systemd[1]: Starting 
NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2622.\nJul 07 20:15:16 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2622.\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4101] device (podman0): state change: ip-check -> secondaries (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4105] device (podman0): state change: secondaries -> activated (reason 'none', managed-type: 'external')\nJul 07 20:15:16 managed-node2 NetworkManager[714]: [1751933716.4116] device (podman0): Activation: successful, device activated.\nJul 07 20:15:16 managed-node2 systemd[1]: Started libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2701.\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.467418922 -0400 EDT m=+0.168638968 container init 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.469862017 -0400 EDT m=+0.171082040 container start 6195e69957a5d2d8949199a8ee6dc85806f542c9dec461ce9010cea0f84685cb (image=, name=quadlet-pod-infra, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, PODMAN_SYSTEMD_UNIT=quadlet-pod-pod-pod.service)\nJul 07 20:15:16 managed-node2 podman[59525]: 2025-07-07 20:15:16.47596405 -0400 EDT m=+0.177184035 pod start fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519 (image=, name=quadlet-pod)\nJul 07 20:15:16 managed-node2 quadlet-pod-pod-pod[59525]: quadlet-pod\nJul 07 20:15:16 managed-node2 systemd[1]: Started quadlet-pod-pod-pod.service.\n\u2591\u2591 Subject: A start job for unit quadlet-pod-pod-pod.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-pod-pod.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2532.\nJul 07 20:15:17 managed-node2 python3.12[59737]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:15:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:18 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:18 managed-node2 podman[59923]: 2025-07-07 20:15:18.970989919 -0400 EDT m=+0.333232348 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:15:19 managed-node2 python3.12[60094]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:19 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:19 managed-node2 python3.12[60249]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:20 managed-node2 python3.12[60374]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933719.570603-19951-98699047571118/.source.container dest=/etc/containers/systemd/quadlet-pod-container.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:20 managed-node2 python3.12[60529]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:20 managed-node2 systemd[1]: Reload requested from client PID 60530 ('systemctl') (unit session-5.scope)...\nJul 07 20:15:20 managed-node2 systemd[1]: Reloading...\nJul 07 20:15:20 managed-node2 systemd-rc-local-generator[60574]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:15:20 managed-node2 systemd[1]: Reloading finished in 199 ms.\nJul 07 20:15:21 managed-node2 
python3.12[60739]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:21 managed-node2 systemd[1]: Starting quadlet-pod-container.service...\n\u2591\u2591 Subject: A start job for unit quadlet-pod-container.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-container.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2708.\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.598559882 -0400 EDT m=+0.046043355 container create 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service)\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.641059859 -0400 EDT m=+0.088543485 container init 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, created_by=test/system/build-testimage, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z)\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.643379876 -0400 EDT m=+0.090863506 container start 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d (image=quay.io/libpod/testimage:20210610, name=quadlet-pod-container, pod_id=fad3aff1d9a9ed34655a4f4bfc71bbd63812ca9ebb1ee367882a8205776aa519, io.buildah.version=1.21.0, PODMAN_SYSTEMD_UNIT=quadlet-pod-container.service, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:15:21 managed-node2 systemd[1]: Started quadlet-pod-container.service.\n\u2591\u2591 Subject: A start job for unit quadlet-pod-container.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit quadlet-pod-container.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2708.\nJul 07 20:15:21 managed-node2 quadlet-pod-container[60743]: 39e7825a90b7abbaa80db2f0c0a10aef7544e83b9db2932e2ffd86fefd46662d\nJul 07 20:15:21 managed-node2 podman[60743]: 2025-07-07 20:15:21.576103282 -0400 EDT m=+0.023586939 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610\nJul 07 20:15:22 managed-node2 python3.12[60912]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:23 managed-node2 python3.12[61068]: ansible-ansible.legacy.command Invoked with _raw_params=cat /etc/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:24 managed-node2 python3.12[61224]: 
ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}}\n {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:25 managed-node2 python3.12[61388]: ansible-user Invoked with name=user_quadlet_pod uid=2223 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:15:25 managed-node2 useradd[61390]: new group: name=user_quadlet_pod, GID=2223\nJul 07 20:15:25 managed-node2 useradd[61390]: new user: name=user_quadlet_pod, UID=2223, GID=2223, home=/home/user_quadlet_pod, shell=/bin/bash, from=/dev/pts/0\nJul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ]\nJul 07 20:15:25 managed-node2 rsyslogd[883]: imjournal: journal files changed, reloading... [v8.2506.0-1.el10 try https://www.rsyslog.com/e/0 ]\nJul 07 20:15:26 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:15:27 managed-node2 python3.12[61703]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:28 managed-node2 python3.12[61866]: ansible-getent Invoked with database=passwd key=user_quadlet_pod fail_key=False service=None split=None\nJul 07 20:15:28 managed-node2 python3.12[62022]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:29 managed-node2 python3.12[62180]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:29 managed-node2 python3.12[62336]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:31 managed-node2 python3.12[62492]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:32 managed-node2 python3.12[62649]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True 
argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:32 managed-node2 python3.12[62805]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:33 managed-node2 python3.12[62961]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:15:33 managed-node2 systemd[1]: Created slice user-2223.slice - User Slice of UID 2223.\n\u2591\u2591 Subject: A start job for unit user-2223.slice has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-2223.slice has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2871.\nJul 07 20:15:33 managed-node2 systemd[1]: Starting user-runtime-dir@2223.service - User Runtime Directory /run/user/2223...\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@2223.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@2223.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2793.\nJul 07 20:15:34 managed-node2 systemd[1]: Finished user-runtime-dir@2223.service - User Runtime Directory /run/user/2223.\n\u2591\u2591 Subject: A start job for unit user-runtime-dir@2223.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user-runtime-dir@2223.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2793.\nJul 07 20:15:34 managed-node2 systemd[1]: Starting user@2223.service - User Manager for UID 2223...\n\u2591\u2591 Subject: A start job for unit user@2223.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@2223.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2873.\nJul 07 20:15:34 managed-node2 systemd-logind[659]: New session 7 of user user_quadlet_pod.\n\u2591\u2591 Subject: A new session 7 has been created for user user_quadlet_pod\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 Documentation: sd-login(3)\n\u2591\u2591 \n\u2591\u2591 A new session with the ID 7 has been created for the user user_quadlet_pod.\n\u2591\u2591 \n\u2591\u2591 The leading process of the session is 62973.\nJul 07 20:15:34 managed-node2 (systemd)[62973]: pam_unix(systemd-user:session): session opened for user user_quadlet_pod(uid=2223) by user_quadlet_pod(uid=0)\nJul 07 20:15:34 managed-node2 systemd[62973]: Queued start job for default target default.target.\nJul 07 20:15:34 managed-node2 systemd[62973]: Created slice app.slice - User Application Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start 
job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 5.\nJul 07 20:15:34 managed-node2 systemd[62973]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 9.\nJul 07 20:15:34 managed-node2 systemd[62973]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 10.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target paths.target - Paths.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 11.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target timers.target - Timers.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 8.\nJul 07 20:15:34 managed-node2 systemd[62973]: Starting dbus.socket - D-Bus User Message Bus Socket...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4.\nJul 07 20:15:34 managed-node2 systemd[62973]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 12.\nJul 07 20:15:34 managed-node2 systemd[62973]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 12.\nJul 07 20:15:34 managed-node2 systemd[62973]: Listening on dbus.socket - D-Bus User Message Bus Socket.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target sockets.target - Sockets.\n\u2591\u2591 Subject: A start job for unit UNIT has finished 
successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target basic.target - Basic System.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2.\nJul 07 20:15:34 managed-node2 systemd[62973]: Reached target default.target - Main User Target.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 1.\nJul 07 20:15:34 managed-node2 systemd[62973]: Startup finished in 74ms.\n\u2591\u2591 Subject: User manager start-up is now complete\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The user manager instance for user 2223 has been started. All services queued\n\u2591\u2591 for starting have been started. Note that other services might still be starting\n\u2591\u2591 up or be started at any later time.\n\u2591\u2591 \n\u2591\u2591 Startup of the manager took 74648 microseconds.\nJul 07 20:15:34 managed-node2 systemd[1]: Started user@2223.service - User Manager for UID 2223.\n\u2591\u2591 Subject: A start job for unit user@2223.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit user@2223.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 2873.\nJul 07 20:15:34 managed-node2 python3.12[63135]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:34 managed-node2 python3.12[63290]: ansible-ansible.legacy.stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:35 managed-node2 python3.12[63415]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933734.666246-20447-234683972003494/.source.pod dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=1884c880482430d8bf2e944b003734fb8b7a462d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:35 managed-node2 sudo[63620]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-mwdxpsyaqgiwosrsmcnobsvzckxuxltr ; 
XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933735.4058456-20475-145399815936558/AnsiballZ_systemd.py'\nJul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:35 managed-node2 python3.12[63623]: ansible-systemd [WARNING] Module remote_tmp /home/user_quadlet_pod/.ansible/tmp did not exist and was created with a mode of 0700, this may cause issues when running as another user. To avoid this, create the remote_tmp dir with the correct permissions manually\nJul 07 20:15:35 managed-node2 systemd[62973]: Reload requested from client PID 63624 ('systemctl')...\nJul 07 20:15:35 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:35 managed-node2 systemd[62973]: Reloading finished in 42 ms.\nJul 07 20:15:35 managed-node2 sudo[63620]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:36 managed-node2 sudo[63839]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ofgmoaezuncvsjzylzfbwkllzxyiawqa ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933736.0668013-20497-124358191920012/AnsiballZ_systemd.py'\nJul 07 20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:36 managed-node2 python3.12[63842]: ansible-systemd Invoked with name=quadlet-pod-pod-pod.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:36 managed-node2 systemd[62973]: Starting podman-user-wait-network-online.service - Wait for system level network-online.target as user....\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 25.\nJul 07 20:15:36 managed-node2 sh[63846]: active\nJul 07 20:15:36 managed-node2 systemd[62973]: Finished podman-user-wait-network-online.service - Wait for system level network-online.target as user..\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 25.\nJul 07 20:15:36 managed-node2 systemd[62973]: Starting quadlet-pod-pod-pod.service...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice session.slice - User Core Session Slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 27.\nJul 07 20:15:36 
managed-node2 systemd[62973]: Starting dbus-broker.service - D-Bus User Message Bus...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 26.\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored\nJul 07 20:15:36 managed-node2 systemd[62973]: Started dbus-broker.service - D-Bus User Message Bus.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 26.\nJul 07 20:15:36 managed-node2 dbus-broker-launch[63870]: Ready\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice user.slice - Slice /user.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 33.\nJul 07 20:15:36 managed-node2 systemd[62973]: Created slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 32.\nJul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63855]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de\nJul 07 20:15:36 managed-node2 systemd[62973]: Started podman-pause-569872be.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 36.\nJul 07 20:15:36 managed-node2 systemd[62973]: Started libpod-31f1b98498c312272d594cc143121f0c4e208b416f5b06370302a3ade84678f0.scope - libcrun container.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 40.\nJul 07 20:15:36 managed-node2 quadlet-pod-pod-pod[63874]: quadlet-pod\nJul 07 20:15:36 managed-node2 systemd[62973]: Started quadlet-pod-pod-pod.service.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 13.\nJul 07 
20:15:36 managed-node2 sudo[63839]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:37 managed-node2 python3.12[64055]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:38 managed-node2 python3.12[64212]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:38 managed-node2 python3.12[64368]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:39 managed-node2 python3.12[64524]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/user_quadlet_pod _raw_params=loginctl enable-linger user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None\nJul 07 20:15:40 managed-node2 sudo[64729]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-opzncrxhlgpablgicvdgsyjydbaatunc ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933740.2555504-20657-253835226843587/AnsiballZ_podman_image.py'\nJul 07 20:15:40 managed-node2 sudo[64729]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64733.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 45.\nJul 07 20:15:40 managed-node2 systemd[62973]: Started podman-64740.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 49.\nJul 07 20:15:41 managed-node2 systemd[62973]: Started podman-64765.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 53.\nJul 07 20:15:41 managed-node2 sudo[64729]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:41 managed-node2 python3.12[64927]: ansible-file Invoked with path=/home/user_quadlet_pod/.config/containers/systemd state=directory owner=user_quadlet_pod group=2223 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:42 managed-node2 python3.12[65082]: ansible-ansible.legacy.stat Invoked with 
path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:42 managed-node2 python3.12[65207]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933742.1096382-20721-81203128614920/.source.container dest=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container owner=user_quadlet_pod group=2223 mode=0644 follow=False _original_basename=systemd.j2 checksum=f0b5c8159fc3c65bf9310a371751609e4c1ba4c3 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:43 managed-node2 sudo[65412]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-dradgtbmatpvlibzybrbrgncbuvsbmla ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933742.8958044-20756-33448022762234/AnsiballZ_systemd.py'\nJul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:43 managed-node2 python3.12[65415]: ansible-systemd Invoked with daemon_reload=True scope=user daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None\nJul 07 20:15:43 managed-node2 systemd[62973]: Reload requested from client PID 65416 ('systemctl')...\nJul 07 20:15:43 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:43 managed-node2 systemd[62973]: Reloading finished in 50 ms.\nJul 07 20:15:43 managed-node2 sudo[65412]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:43 managed-node2 sudo[65630]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ypfsehmisorrgovabzrcscgtxcipauhh ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933743.5809875-20783-151961798381358/AnsiballZ_systemd.py'\nJul 07 20:15:43 managed-node2 sudo[65630]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:44 managed-node2 python3.12[65633]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None\nJul 07 20:15:44 managed-node2 systemd[62973]: Starting quadlet-pod-container.service...\n\u2591\u2591 Subject: A start job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 57.\nJul 07 20:15:44 managed-node2 systemd[62973]: Started quadlet-pod-container.service.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 57.\nJul 07 20:15:44 managed-node2 quadlet-pod-container[65636]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748\nJul 07 20:15:44 managed-node2 sudo[65630]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:44 managed-node2 python3.12[65805]: ansible-ansible.legacy.command 
Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 python3.12[65961]: ansible-ansible.legacy.command Invoked with _raw_params=cat /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 sudo[66167]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zkgmctvpahwcnyvziosokhuvkvmstaqp ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933745.1806111-20829-116188117400154/AnsiballZ_command.py'\nJul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:45 managed-node2 python3.12[66170]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}}\n {{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:45 managed-node2 systemd[62973]: Started podman-66171.scope.\n\u2591\u2591 Subject: A start job for unit UNIT has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit UNIT has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 71.\nJul 07 20:15:45 managed-node2 sudo[66167]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:15:46 managed-node2 python3.12[66333]: ansible-stat Invoked with path=/var/lib/systemd/linger/user_quadlet_pod follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:47 managed-node2 python3.12[66645]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:48 managed-node2 python3.12[66806]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:49 managed-node2 python3.12[66963]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:49 managed-node2 python3.12[67119]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:51 managed-node2 python3.12[67275]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:51 managed-node2 python3.12[67432]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True 
stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:52 managed-node2 python3.12[67588]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:52 managed-node2 python3.12[67744]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:53 managed-node2 sudo[67951]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-qbrxmfhyqxuzmhgzlevjndrxencddmhb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933753.0681589-21169-130280143861349/AnsiballZ_systemd.py'\nJul 07 20:15:53 managed-node2 sudo[67951]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:15:53 managed-node2 python3.12[67954]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:15:53 managed-node2 systemd[62973]: Reload requested from client PID 67957 ('systemctl')...\nJul 07 20:15:53 managed-node2 systemd[62973]: Reloading...\nJul 07 20:15:53 managed-node2 systemd[62973]: Reloading finished in 49 ms.\nJul 07 20:15:53 managed-node2 systemd[62973]: Stopping quadlet-pod-container.service...\n\u2591\u2591 Subject: A stop job for unit UNIT has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 75.\nJul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: time=\"2025-07-07T20:16:03-04:00\" level=warning msg=\"StopSignal SIGTERM failed to stop container quadlet-pod-container in 10 seconds, resorting to SIGKILL\"\nJul 07 20:16:03 managed-node2 quadlet-pod-container[67969]: f4293ce7df9976771eedba45cd946f75d5668af766c076f83710b7afcc49d748\nJul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Main process exited, code=exited, status=137/n/a\n\u2591\u2591 Subject: Unit process exited\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 An ExecStart= process belonging to unit UNIT has exited.\n\u2591\u2591 \n\u2591\u2591 The process' exit code is 'exited' and its exit status is 137.\nJul 07 20:16:03 managed-node2 systemd[62973]: Removed slice user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice - cgroup user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 76 and the job result is done.\nJul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 
systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68000]: quadlet-pod\nJul 07 20:16:03 managed-node2 systemd[62973]: quadlet-pod-container.service: Failed with result 'exit-code'.\n\u2591\u2591 Subject: Unit failed\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit UNIT has entered the 'failed' state with result 'exit-code'.\nJul 07 20:16:03 managed-node2 systemd[62973]: Stopped quadlet-pod-container.service.\n\u2591\u2591 Subject: A stop job for unit UNIT has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit UNIT has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 75 and the job result is done.\nJul 07 20:16:03 managed-node2 sudo[67951]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:16:03 managed-node2 systemd[62973]: user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: Failed to open /run/user/2223/systemd/transient/user-libpod_pod_704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de.slice: No such file or directory\nJul 07 20:16:03 managed-node2 quadlet-pod-pod-pod[68018]: 704e993e3388f359fa429f3a488ca7e2b54dca1caa44e7a0a0d7faffb2fae2de\nJul 07 20:16:04 managed-node2 python3.12[68183]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:05 managed-node2 python3.12[68495]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n systemctl list-unit-files | grep quadlet\n ls -alrtF /etc/containers/systemd\n /usr/libexec/podman/quadlet -dryrun -v -no-kmsg-log\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:06 managed-node2 python3.12[68705]: ansible-ansible.legacy.command Invoked with _raw_params=grep type=AVC /var/log/audit/audit.log _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:06 managed-node2 python3.12[68861]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:08 managed-node2 python3.12[69172]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:09 managed-node2 python3.12[69333]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1\nJul 07 20:16:09 managed-node2 python3.12[69490]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:10 managed-node2 python3.12[69646]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:11 managed-node2 python3.12[69802]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:12 managed-node2 python3.12[69959]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:12 managed-node2 python3.12[70115]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user_quadlet_pod _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:16:13 managed-node2 python3.12[70271]: ansible-stat Invoked with path=/run/user/2223 follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:13 managed-node2 sudo[70478]: root : TTY=pts/0 ; PWD=/root ; USER=user_quadlet_pod ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-zgbhusmnvfxgnyqpigdcfxpfmxddoleb ; XDG_RUNTIME_DIR=/run/user/2223 /usr/bin/python3.12 /var/tmp/ansible-tmp-1751933773.6131501-21423-280948783795652/AnsiballZ_systemd.py'\nJul 07 20:16:13 managed-node2 sudo[70478]: pam_unix(sudo:session): session opened for user user_quadlet_pod(uid=2223) by root(uid=0)\nJul 07 20:16:14 managed-node2 python3.12[70482]: ansible-systemd Invoked with name=quadlet-pod-container.service scope=user state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:16:14 managed-node2 systemd[62973]: Reload requested from client PID 70485 ('systemctl')...\nJul 07 20:16:14 managed-node2 systemd[62973]: Reloading...\nJul 07 20:16:14 managed-node2 systemd[62973]: Reloading finished in 47 ms.\nJul 07 20:16:14 managed-node2 sudo[70478]: pam_unix(sudo:session): session closed for user user_quadlet_pod\nJul 07 20:16:14 managed-node2 python3.12[70650]: ansible-stat Invoked with path=/home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:16:15 managed-node2 python3.12[70963]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:194" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Monday 07 July 2025 20:16:15 -0400 (0:00:00.431) 0:01:10.051 *********** 
===============================================================================
fedora.linux_system_roles.podman : Stop and disable service ------------ 10.95s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12
Check files ------------------------------------------------------------- 2.74s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:40
fedora.linux_system_roles.podman : Ensure container images are present --- 1.34s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Gathering Facts --------------------------------------------------------- 1.17s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:9
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.13s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Ensure container images are present --- 1.02s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 0.99s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
Debug3 ------------------------------------------------------------------ 0.96s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:127
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.94s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.94s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Gather the package facts ------------- 0.94s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.podman : Reload systemctl --------------------- 0.91s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
fedora.linux_system_roles.podman : Start service ------------------------ 0.86s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.80s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.78s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
fedora.linux_system_roles.podman : Reload systemctl --------------------- 0.76s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87
fedora.linux_system_roles.podman : Start service ------------------------ 0.75s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.75s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Check files ------------------------------------------------------------- 0.73s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_pod.yml:70
fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.73s
/tmp/collections-lTb/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
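
Editor's note for readers reconstructing the run from the journal dump above: the role deploys two user-scope quadlet units for user_quadlet_pod (quadlet-pod-pod.pod and quadlet-pod-container.container under ~/.config/containers/systemd/), reloads that user's systemd manager, and starts the generated quadlet-pod-pod-pod.service and quadlet-pod-container.service units. The sketch below reproduces those steps by hand under stated assumptions: the unit file bodies are illustrative minimal examples (the log records only their checksums, not their contents), and quay.io/libpod/testimage:20210610 is a placeholder image, not necessarily the one the test uses.

# Minimal sketch, run as root on the managed node; user_quadlet_pod has UID/GID 2223 as in the log.
loginctl enable-linger user_quadlet_pod    # keep user@2223.service running without a login session

# Deploy the user-scope quadlet units (assumed contents, for illustration only).
install -d -o user_quadlet_pod -g 2223 -m 0755 /home/user_quadlet_pod/.config/containers/systemd
cat > /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod <<'EOF'
[Pod]
PodName=quadlet-pod
EOF
cat > /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container <<'EOF'
[Container]
Image=quay.io/libpod/testimage:20210610
ContainerName=quadlet-pod-container
Pod=quadlet-pod-pod.pod
EOF
chown user_quadlet_pod:2223 /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-pod.pod \
    /home/user_quadlet_pod/.config/containers/systemd/quadlet-pod-container.container

# Reload the user manager and start the generated services, as the role does via sudo with
# XDG_RUNTIME_DIR pointed at the user's runtime directory.
sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user daemon-reload
sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user start quadlet-pod-pod-pod.service
sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 systemctl --user start quadlet-pod-container.service

# Verify the container joined the pod, mirroring the test's podman pod inspect check.
sudo -u user_quadlet_pod XDG_RUNTIME_DIR=/run/user/2223 \
    podman pod inspect quadlet-pod --format '{{range .Containers}}{{.Name}} {{end}}'

These are the same service names the user manager (systemd[62973]) reports starting at 20:15:36 and 20:15:44 in the journal above.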