ansible-playbook [core 2.17.12]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-AV4
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.11 (main, Jun 4 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-7)] (/usr/bin/python3.12)
  jinja version = 3.1.6
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
Skipping callback 'debug', as we already have a stdout callback.
Skipping callback 'json', as we already have a stdout callback.
Skipping callback 'jsonl', as we already have a stdout callback.
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_basic.yml **********************************************
2 plays in /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:5
Monday 07 July 2025 20:15:08 -0400 (0:00:00.029) 0:00:00.029 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_test_password": {"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"}, "mysql_container_root_password": {"__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"}}, "ansible_included_var_files": ["/tmp/podman-b9i/tests/vars/vault-variables.yml"], "changed": false}
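The vaulted values above are loaded from /tmp/podman-b9i/tests/vars/vault-variables.yml and remain encrypted on disk; they are only decrypted in memory with the vault password at run time. For orientation, a task of roughly the following shape would produce a record like the one above. This is a hedged sketch, not the actual test playbook; the relative path is inferred from ansible_included_var_files.

    # Sketch only: the real test playbook may name and locate this task differently.
    - name: Include vault variables
      ansible.builtin.include_vars:
        file: vars/vault-variables.yml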
PLAY [Ensure that the role can manage quadlet specs] ***************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9
Monday 07 July 2025 20:15:08 -0400 (0:00:00.063) 0:00:00.093 ***********
[WARNING]: Platform linux on host managed-node1 is using the discovered Python
interpreter at /usr/bin/python3.9, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
ok: [managed-node1]

TASK [Test is only supported on x86_64] ****************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:55
Monday 07 July 2025 20:15:09 -0400 (0:00:01.088) 0:00:01.181 ***********
skipping: [managed-node1] => {"false_condition": "ansible_facts[\"architecture\"] != \"x86_64\""}

TASK [End test] ****************************************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:62
Monday 07 July 2025 20:15:09 -0400 (0:00:00.021) 0:00:01.203 ***********
META: end_play conditional evaluated to False, continuing play
skipping: [managed-node1] => {"skip_reason": "end_play conditional evaluated to False, continuing play"}
MSG: end_play

TASK [Run role - do not pull images] *******************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:70
Monday 07 July 2025 20:15:09 -0400 (0:00:00.015) 0:00:01.218 ***********
included: fedora.linux_system_roles.podman for managed-node1

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3
Monday 07 July 2025 20:15:09 -0400 (0:00:00.096) 0:00:01.314 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3
Monday 07 July 2025 20:15:09 -0400 (0:00:00.050) 0:00:01.365 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11
Monday 07 July 2025 20:15:09 -0400 (0:00:00.058) 0:00:01.423 ***********
ok: [managed-node1] => {"changed": false, "stat": {"exists": false}}

TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16
Monday 07 July 2025 20:15:10 -0400 (0:00:00.521) 0:00:01.944 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_is_ostree": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23
Monday 07 July 2025 20:15:10 -0400 (0:00:00.035) 0:00:01.980 ***********
ok: [managed-node1] => {"changed": false, "stat": {"exists": false}}

TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28
Monday 07 July 2025 20:15:10 -0400 (0:00:00.380) 0:00:02.360 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_is_transactional": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32
Monday 07 July 2025 20:15:10 -0400 (0:00:00.045) 0:00:02.406 ***********
ok: [managed-node1] => (item=RedHat.yml) => {"ansible_facts": {"__podman_packages": ["podman", "shadow-utils-subid"]}, "ansible_included_var_files": ["/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml"], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml"}
skipping: [managed-node1] => (item=CentOS.yml) => {"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False"}
skipping: [managed-node1] => (item=CentOS_9.yml) => {"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False"}
skipping: [managed-node1] => (item=CentOS_9.yml) => {"ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Gather the package facts] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
Monday 07 July 2025 20:15:10 -0400 (0:00:00.071) 0:00:02.478 ***********
ok: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Enable copr if requested] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10
Monday 07 July 2025 20:15:11 -0400 (0:00:01.183) 0:00:03.661 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14
Monday 07 July 2025 20:15:11 -0400 (0:00:00.052) 0:00:03.713 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28
Monday 07 July 2025 20:15:12 -0400 (0:00:00.053) 0:00:03.767 ***********
skipping: [managed-node1] => {"false_condition": "__podman_is_transactional | d(false)"}

TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33
Monday 07 July 2025 20:15:12 -0400 (0:00:00.046) 0:00:03.814 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38
Monday 07 July 2025 20:15:12 -0400 (0:00:00.049) 0:00:03.864 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46
Monday 07 July 2025 20:15:12 -0400 (0:00:00.060) 0:00:03.924 ***********
ok: [managed-node1] => {"changed": false, "cmd": ["podman", "--version"], "delta": "0:00:00.026633", "end": "2025-07-07 20:15:12.613357", "rc": 0, "start": "2025-07-07 20:15:12.586724"}
STDOUT: podman version 5.5.1

TASK [fedora.linux_system_roles.podman : Set podman version] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52
Monday 07 July 2025 20:15:12 -0400 (0:00:00.496) 0:00:04.420 ***********
ok: [managed-node1] => {"ansible_facts": {"podman_version": "5.5.1"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56
Monday 07 July 2025 20:15:12 -0400 (0:00:00.055) 0:00:04.475 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63
Monday 07 July 2025 20:15:12 -0400 (0:00:00.038) 0:00:04.514 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73
Monday 07 July 2025 20:15:12 -0400 (0:00:00.052) 0:00:04.567 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80
Monday 07 July 2025 20:15:12 -0400 (0:00:00.055) 0:00:04.622 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96
Monday 07 July 2025 20:15:12 -0400 (0:00:00.073) 0:00:04.696 ***********
META: end_host conditional evaluated to False, continuing execution for managed-node1
skipping: [managed-node1] => {"skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1"}
MSG: end_host conditional evaluated to false, continuing execution for managed-node1

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109
Monday 07 July 2025 20:15:13 -0400 (0:00:00.073) 0:00:04.769 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:15:13 -0400 (0:00:00.103) 0:00:04.873 ***********
ok: [managed-node1] => {"ansible_facts": {"getent_passwd": {"root": ["x", "0", "0", "root", "/root", "/bin/bash"]}}, "changed": false}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:15:13 -0400 (0:00:00.558) 0:00:05.432 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:15:13 -0400 (0:00:00.078) 0:00:05.510 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_group": "0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:15:13 -0400 (0:00:00.069) 0:00:05.579 ***********
ok: [managed-node1] => {"changed": false, "stat": {"atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true}}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:15:14 -0400 (0:00:00.370) 0:00:05.949 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:15:14 -0400 (0:00:00.052) 0:00:06.001 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:15:14 -0400 (0:00:00.048) 0:00:06.050 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:15:14 -0400 (0:00:00.053) 0:00:06.104 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:15:14 -0400 (0:00:00.052) 0:00:06.156 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:15:14 -0400 (0:00:00.051) 0:00:06.207 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:15:14 -0400 (0:00:00.051) 0:00:06.259 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:15:14 -0400 (0:00:00.036) 0:00:06.311 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set config file paths] ****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115
Monday 07 July 2025 20:15:14 -0400 (0:00:00.036) 0:00:06.347 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Handle container.conf.d] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126
Monday 07 July 2025 20:15:14 -0400 (0:00:00.077) 0:00:06.424 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5
Monday 07 July 2025 20:15:14 -0400 (0:00:00.065) 0:00:06.490 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Update container config file] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13
Monday 07 July 2025 20:15:14 -0400 (0:00:00.070) 0:00:06.560 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] *************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129
Monday 07 July 2025 20:15:14 -0400 (0:00:00.031) 0:00:06.591 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5
Monday 07 July 2025 20:15:14 -0400 (0:00:00.067) 0:00:06.659 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Update registries config file] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13
Monday 07 July 2025 20:15:14 -0400 (0:00:00.054) 0:00:06.714 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Handle storage.conf] ******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132
Monday 07 July 2025 20:15:15 -0400 (0:00:00.046) 0:00:06.761 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7
Monday 07 July 2025 20:15:15 -0400 (0:00:00.072) 0:00:06.833 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Update storage config file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15
Monday 07 July 2025 20:15:15 -0400 (0:00:00.043) 0:00:06.877 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Handle policy.json] *******************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135
Monday 07 July 2025 20:15:15 -0400 (0:00:00.036) 0:00:06.913 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8
Monday 07 July 2025 20:15:15 -0400 (0:00:00.061) 0:00:06.975 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16
Monday 07 July 2025 20:15:15 -0400 (0:00:00.030) 0:00:07.006 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get the existing policy.json] *********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21
Monday 07 July 2025 20:15:15 -0400 (0:00:00.029) 0:00:07.035 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Write new policy.json file] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27
Monday 07 July 2025 20:15:15 -0400 (0:00:00.030) 0:00:07.066 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False"}

TASK [Manage firewall for specified ports] *************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141
Monday 07 July 2025 20:15:15 -0400 (0:00:00.030) 0:00:07.096 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False"}

TASK [Manage selinux for specified ports] **************************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148
Monday 07 July 2025 20:15:15 -0400 (0:00:00.029) 0:00:07.126 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155
Monday 07 July 2025 20:15:15 -0400 (0:00:00.031) 0:00:07.157 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_cancel_user_linger": []}, "changed": false}

TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159
Monday 07 July 2025 20:15:15 -0400 (0:00:00.030) 0:00:07.188 ***********
skipping: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle credential files - present] ****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168
Monday 07 July 2025 20:15:15 -0400 (0:00:00.057) 0:00:07.245 ***********
skipping: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle secrets] ***********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177
Monday 07 July 2025 20:15:15 -0400 (0:00:00.026) 0:00:07.272 ***********
skipping: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184
Monday 07 July 2025 20:15:15 -0400 (0:00:00.027) 0:00:07.300 ***********
skipping: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191
Monday 07 July 2025 20:15:15 -0400 (0:00:00.026) 0:00:07.326 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log))

TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14
Monday 07 July 2025 20:15:15 -0400 (0:00:00.076) 0:00:07.403 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_quadlet_file_src": "", "__podman_quadlet_spec": {"Container": {"ContainerName": "nopull", "Image": "quay.io/libpod/testimage:20210610"}, "Install": {"WantedBy": "default.target"}}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": ""}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Monday 07 July 2025 20:15:15 -0400 (0:00:00.041) 0:00:07.445 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_continue_if_pull_fails": false, "__podman_pull_image": false, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root"}, "changed": false}
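The two "Set per-container variables" records above show how the role turned its input into a quadlet spec named "nopull" of type "container" with image pulling disabled. As a rough illustration of the kind of input that produces this, the sketch below uses the role's podman_quadlet_specs list variable; the per-spec key names (name, type, state) and the podman_pull_image toggle are my reading of the derived facts above, not a copy of the actual test playbook.

    # Hedged sketch of a caller, not the real tests_quadlet_basic.yml.
    - name: Run role - do not pull images (illustrative)
      include_role:
        name: fedora.linux_system_roles.podman
      vars:
        podman_pull_image: false            # matches __podman_pull_image: false
        podman_quadlet_specs:
          - name: nopull                    # matches __podman_quadlet_name
            type: container                 # matches __podman_quadlet_type
            state: created                  # matches __podman_state
            Container:
              ContainerName: nopull
              Image: quay.io/libpod/testimage:20210610
            Install:
              WantedBy: default.target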
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Monday 07 July 2025 20:15:15 -0400 (0:00:00.039) 0:00:07.484 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Monday 07 July 2025 20:15:15 -0400 (0:00:00.030) 0:00:07.514 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false}, "changed": false}

TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Monday 07 July 2025 20:15:15 -0400 (0:00:00.054) 0:00:07.569 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Monday 07 July 2025 20:15:15 -0400 (0:00:00.058) 0:00:07.628 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Monday 07 July 2025 20:15:15 -0400 (0:00:00.034) 0:00:07.663 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Monday 07 July 2025 20:15:15 -0400 (0:00:00.032) 0:00:07.696 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_group": "0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Monday 07 July 2025 20:15:15 -0400 (0:00:00.043) 0:00:07.739 ***********
ok: [managed-node1] => {"changed": false, "stat": {"atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true}}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Monday 07 July 2025 20:15:16 -0400 (0:00:00.365) 0:00:08.104 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Monday 07 July 2025 20:15:16 -0400 (0:00:00.032) 0:00:08.137 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Monday 07 July 2025 20:15:16 -0400 (0:00:00.030) 0:00:08.168 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Monday 07 July 2025 20:15:16 -0400 (0:00:00.032) 0:00:08.200 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Monday 07 July 2025 20:15:16 -0400 (0:00:00.067) 0:00:08.267 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Monday 07 July 2025 20:15:16 -0400 (0:00:00.032) 0:00:08.300 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85
Monday 07 July 2025 20:15:16 -0400 (0:00:00.033) 0:00:08.334 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92
Monday 07 July 2025 20:15:16 -0400 (0:00:00.033) 0:00:08.367 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62
Monday 07 July 2025 20:15:16 -0400 (0:00:00.033) 0:00:08.400 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_activate_systemd_unit": false, "__podman_images_found": ["quay.io/libpod/testimage:20210610"], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73
Monday 07 July 2025 20:15:16 -0400 (0:00:00.053) 0:00:08.454 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_quadlet_path": "/etc/containers/systemd"}, "changed": false}

TASK [fedora.linux_system_roles.podman : Get kube yaml contents] ***************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77
Monday 07 July 2025 20:15:16 -0400 (0:00:00.035) 0:00:08.489 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88
Monday 07 July 2025 20:15:16 -0400 (0:00:00.034) 0:00:08.524 ***********
ok: [managed-node1] => {"ansible_facts": {"__podman_images": ["quay.io/libpod/testimage:20210610"], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": []}, "changed": false}

TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106
Monday 07 July 2025 20:15:16 -0400 (0:00:00.084) 0:00:08.608 ***********
ok: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Cleanup quadlets] *********************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113
Monday 07 July 2025 20:15:16 -0400 (0:00:00.038) 0:00:08.647 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Create and update quadlets] ***********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117
Monday 07 July 2025 20:15:16 -0400 (0:00:00.030) 0:00:08.678 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2
Monday 07 July 2025 20:15:17 -0400 (0:00:00.072) 0:00:08.750 ***********
included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Monday 07 July 2025 20:15:17 -0400 (0:00:00.054) 0:00:08.805 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:08.835 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Monday 07 July 2025 20:15:17 -0400 (0:00:00.031) 0:00:08.867 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Create host directories] **************
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7
Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:08.897 ***********
skipping: [managed-node1] => {"changed": false, "skipped_reason": "No items in the list"}

TASK [fedora.linux_system_roles.podman : Ensure container images are present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
Monday 07 July 2025 20:15:17 -0400 (0:00:00.030) 0:00:08.927 ***********
skipping: [managed-node1] => (item=None) => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}
skipping: [managed-node1] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39
Monday 07 July 2025 20:15:17 -0400 (0:00:00.036) 0:00:08.964 ***********
ok: [managed-node1] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 43, "state": "directory", "uid": 0}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ********
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50
Monday 07 July 2025 20:15:17 -0400 (0:00:00.548) 0:00:09.512 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] ***
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62
Monday 07 July 2025 20:15:17 -0400 (0:00:00.033) 0:00:09.546 ***********
skipping: [managed-node1] => {"changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False"}

TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] *******
task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75
Monday 07 July 2025 20:15:17 -0400 (0:00:00.035) 0:00:09.582 ***********
changed: [managed-node1] => {"changed": true, "checksum": "670d64fc68a9768edb20cad26df2acc703542d85", "dest": "/etc/containers/systemd/nopull.container", "gid": 0, "group": "root", "md5sum": "cedb6667f6cd1b033fe06e2810fe6b19", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 151, "src": "/root/.ansible/tmp/ansible-tmp-1751933717.8906143-17219-30430442070937/.source.container", "state": "file", "uid": 0}
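The "changed" record above is the role writing the rendered quadlet unit (151 bytes) to /etc/containers/systemd/nopull.container. Based on the spec shown earlier, the unit body should contain roughly the sections below; the debug task is only an illustrative way to state that expectation, and the real file's header comment, section ordering and whitespace may differ.

    # Sketch only: roughly the expected unit content, not read from the managed node.
    - name: Show roughly expected contents of nopull.container
      ansible.builtin.debug:
        msg: |
          [Install]
          WantedBy=default.target

          [Container]
          ContainerName=nopull
          Image=quay.io/libpod/testimage:20210610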
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:20 -0400 (0:00:00.071) 0:00:12.709 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:21 -0400 (0:00:00.035) 0:00:12.745 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:21 -0400 (0:00:00.037) 0:00:12.783 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:21 -0400 (0:00:00.044) 0:00:12.827 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:21 -0400 (0:00:00.364) 0:00:13.191 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:21 -0400 (0:00:00.035) 0:00:13.226 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:21 -0400 (0:00:00.033) 0:00:13.260 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:21 -0400 (0:00:00.035) 0:00:13.296 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:21 -0400 (0:00:00.034) 0:00:13.330 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:21 -0400 (0:00:00.034) 0:00:13.365 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:21 -0400 (0:00:00.034) 0:00:13.399 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:21 -0400 (0:00:00.034) 0:00:13.434 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:15:21 -0400 (0:00:00.034) 0:00:13.468 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:15:21 -0400 (0:00:00.042) 0:00:13.511 *********** included: 
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:15:21 -0400 (0:00:00.059) 0:00:13.571 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:15:21 -0400 (0:00:00.032) 0:00:13.604 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:15:21 -0400 (0:00:00.030) 0:00:13.634 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:15:21 -0400 (0:00:00.095) 0:00:13.730 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:15:22 -0400 (0:00:00.030) 0:00:13.761 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:15:22 -0400 (0:00:00.031) 0:00:13.792 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:15:22 -0400 (0:00:00.061) 0:00:13.854 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:15:22 -0400 (0:00:00.035) 0:00:13.889 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } 
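Note: the containers.conf.d, registries.conf.d, and storage.conf tasks above are all skipped because this test run leaves podman_containers_conf, podman_registries_conf, and podman_storage_conf empty, so each "| length > 0" condition evaluates to false. A minimal sketch of a playbook that would exercise one of those paths is shown below; the variable value and its nested schema are illustrative assumptions, not taken from this run or from the role's documentation:

- hosts: managed-node1
  vars:
    # Assumed/illustrative value: with this set non-empty, the
    # "Ensure containers.d exists" and "Update container config file"
    # tasks above would no longer report "Conditional result was False".
    podman_containers_conf:
      containers:
        log_level: debug
  roles:
    - fedora.linux_system_roles.podman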
TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:15:22 -0400 (0:00:00.031) 0:00:13.921 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:15:22 -0400 (0:00:00.064) 0:00:13.986 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:15:22 -0400 (0:00:00.031) 0:00:14.018 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:15:22 -0400 (0:00:00.029) 0:00:14.047 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:15:22 -0400 (0:00:00.031) 0:00:14.079 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:15:22 -0400 (0:00:00.033) 0:00:14.112 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:15:22 -0400 (0:00:00.031) 0:00:14.143 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:15:22 -0400 (0:00:00.032) 0:00:14.176 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 July 2025 20:15:22 -0400 
(0:00:00.031) 0:00:14.208 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:15:22 -0400 (0:00:00.029) 0:00:14.237 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:15:22 -0400 (0:00:00.028) 0:00:14.266 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:15:22 -0400 (0:00:00.062) 0:00:14.328 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:15:22 -0400 (0:00:00.028) 0:00:14.356 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:22 -0400 (0:00:00.075) 0:00:14.432 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": { "Container": { "ContainerName": "bogus", "Image": "this_is_a_bogus_image" }, "Install": { "WantedBy": "default.target" } }, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:22 -0400 (0:00:00.040) 0:00:14.473 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": true, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:22 -0400 (0:00:00.039) 0:00:14.512 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_spec | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 20:15:22 -0400 (0:00:00.030) 0:00:14.543 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "bogus", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:22 -0400 (0:00:00.046) 0:00:14.590 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:22 -0400 (0:00:00.057) 0:00:14.647 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:22 -0400 (0:00:00.036) 0:00:14.683 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:22 -0400 (0:00:00.038) 0:00:14.722 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:23 -0400 (0:00:00.042) 0:00:14.765 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:23 -0400 (0:00:00.362) 0:00:15.128 *********** skipping: [managed-node1] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:23 -0400 (0:00:00.033) 0:00:15.161 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.194 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.226 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:23 -0400 (0:00:00.031) 0:00:15.258 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.290 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:23 -0400 (0:00:00.065) 0:00:15.356 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.388 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.421 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": false, "__podman_images_found": [ 
"this_is_a_bogus_image" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "bogus.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:23 -0400 (0:00:00.052) 0:00:15.473 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:23 -0400 (0:00:00.035) 0:00:15.509 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:23 -0400 (0:00:00.032) 0:00:15.541 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [ "this_is_a_bogus_image" ], "__podman_quadlet_file": "/etc/containers/systemd/bogus.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:23 -0400 (0:00:00.074) 0:00:15.616 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:23 -0400 (0:00:00.037) 0:00:15.653 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:117 Monday 07 July 2025 20:15:23 -0400 (0:00:00.029) 0:00:15.683 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Monday 07 July 2025 20:15:24 -0400 (0:00:00.075) 0:00:15.759 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Monday 07 July 2025 20:15:24 -0400 (0:00:00.053) 0:00:15.812 *********** skipping: [managed-node1] => { 
"changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Monday 07 July 2025 20:15:24 -0400 (0:00:00.030) 0:00:15.842 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Monday 07 July 2025 20:15:24 -0400 (0:00:00.029) 0:00:15.872 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Monday 07 July 2025 20:15:24 -0400 (0:00:00.029) 0:00:15.901 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Monday 07 July 2025 20:15:24 -0400 (0:00:00.027) 0:00:15.929 *********** ok: [managed-node1] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Monday 07 July 2025 20:15:24 -0400 (0:00:00.582) 0:00:16.511 *********** ok: [managed-node1] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:50 Monday 07 July 2025 20:15:25 -0400 (0:00:00.369) 0:00:16.881 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:62 Monday 07 July 2025 20:15:25 -0400 (0:00:00.033) 0:00:16.914 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_quadlet_str | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 
Monday 07 July 2025 20:15:25 -0400 (0:00:00.074) 0:00:16.989 *********** changed: [managed-node1] => { "changed": true, "checksum": "1d087e679d135214e8ac9ccaf33b2222916efb7f", "dest": "/etc/containers/systemd/bogus.container", "gid": 0, "group": "root", "md5sum": "97480a9a73734d9f8007d2c06e7fed1f", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 138, "src": "/root/.ansible/tmp/ansible-tmp-1751933725.2913904-17409-127642446506455/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:87 Monday 07 July 2025 20:15:25 -0400 (0:00:00.678) 0:00:17.668 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:115 Monday 07 July 2025 20:15:25 -0400 (0:00:00.034) 0:00:17.702 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:131 Monday 07 July 2025 20:15:25 -0400 (0:00:00.041) 0:00:17.744 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_activate_systemd_unit | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:198 Monday 07 July 2025 20:15:26 -0400 (0:00:00.046) 0:00:17.790 *********** skipping: [managed-node1] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:204 Monday 07 July 2025 20:15:26 -0400 (0:00:00.031) 0:00:17.822 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:213 Monday 07 July 2025 20:15:26 -0400 (0:00:00.050) 0:00:17.872 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Verify image not pulled and no error] ************************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:106 Monday 07 July 2025 20:15:26 -0400 (0:00:00.063) 0:00:17.936 *********** ok: [managed-node1] => { "changed": false } MSG: All assertions passed TASK [Cleanup] ***************************************************************** task path: 
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:113 Monday 07 July 2025 20:15:26 -0400 (0:00:00.040) 0:00:17.977 *********** included: fedora.linux_system_roles.podman for managed-node1 => (item=nopull) included: fedora.linux_system_roles.podman for managed-node1 => (item=bogus) TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:15:26 -0400 (0:00:00.186) 0:00:18.164 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:15:26 -0400 (0:00:00.052) 0:00:18.216 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:15:26 -0400 (0:00:00.044) 0:00:18.260 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:15:26 -0400 (0:00:00.046) 0:00:18.307 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:15:26 -0400 (0:00:00.052) 0:00:18.360 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:15:26 -0400 (0:00:00.037) 0:00:18.397 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:15:26 -0400 (0:00:00.085) 0:00:18.483 *********** [WARNING]: TASK: fedora.linux_system_roles.podman : Set platform/version specific variables: The loop variable 'item' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior. 
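The warning above reports that the nested "Set platform/version specific variables" include reuses the default loop variable `item`, and recommends setting a dedicated `loop_var` via `loop_control`. A minimal sketch of that pattern follows; the task body, the `__vars_file` construction, and the `varfile` name are illustrative, not the role's actual code:

- name: Set platform/version specific variables
  ansible.builtin.include_vars: "{{ __vars_file }}"
  vars:
    # Assumed for illustration: build the candidate vars-file path from the loop value.
    __vars_file: "{{ role_path }}/vars/{{ varfile }}"
  when: __vars_file is file
  loop:
    - RedHat.yml
    - CentOS.yml
    - CentOS_9.yml
  loop_control:
    # A dedicated loop variable avoids colliding with the outer loop's `item`,
    # which is what the warning above is about.
    loop_var: varfile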
ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS_9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS_9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:15:26 -0400 (0:00:00.070) 0:00:18.553 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:15:27 -0400 (0:00:00.872) 0:00:19.426 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:15:27 -0400 (0:00:00.056) 0:00:19.482 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:15:27 -0400 (0:00:00.064) 0:00:19.547 *********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:15:27 -0400 (0:00:00.057) 0:00:19.605 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:15:27 -0400 (0:00:00.057) 0:00:19.662 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: 
/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:15:27 -0400 (0:00:00.054) 0:00:19.717 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025828", "end": "2025-07-07 20:15:28.330382", "rc": 0, "start": "2025-07-07 20:15:28.304554" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:15:28 -0400 (0:00:00.440) 0:00:20.157 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:15:28 -0400 (0:00:00.040) 0:00:20.198 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:15:28 -0400 (0:00:00.039) 0:00:20.237 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:15:28 -0400 (0:00:00.043) 0:00:20.281 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:15:28 -0400 (0:00:00.041) 0:00:20.323 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:15:28 -0400 (0:00:00.046) 0:00:20.369 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 
July 2025 20:15:28 -0400 (0:00:00.046) 0:00:20.416 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:28 -0400 (0:00:00.128) 0:00:20.544 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:28 -0400 (0:00:00.062) 0:00:20.607 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:28 -0400 (0:00:00.061) 0:00:20.669 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:28 -0400 (0:00:00.072) 0:00:20.742 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:29 -0400 (0:00:00.401) 0:00:21.143 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:29 -0400 (0:00:00.054) 0:00:21.198 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:29 -0400 (0:00:00.057) 0:00:21.255 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:29 -0400 (0:00:00.057) 0:00:21.312 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:29 -0400 (0:00:00.055) 0:00:21.368 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:29 -0400 (0:00:00.056) 0:00:21.424 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:29 -0400 (0:00:00.056) 0:00:21.480 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:29 -0400 (0:00:00.054) 0:00:21.535 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Monday 07 July 2025 20:15:29 -0400 (0:00:00.055) 0:00:21.591 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_parent_mode": "0755", "__podman_parent_path": "/etc/containers", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:126 Monday 07 July 2025 20:15:29 -0400 (0:00:00.070) 
0:00:21.662 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Monday 07 July 2025 20:15:30 -0400 (0:00:00.106) 0:00:21.768 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Monday 07 July 2025 20:15:30 -0400 (0:00:00.058) 0:00:21.826 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:129 Monday 07 July 2025 20:15:30 -0400 (0:00:00.064) 0:00:21.891 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Monday 07 July 2025 20:15:30 -0400 (0:00:00.168) 0:00:22.060 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Monday 07 July 2025 20:15:30 -0400 (0:00:00.056) 0:00:22.116 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:132 Monday 07 July 2025 20:15:30 -0400 (0:00:00.055) 0:00:22.171 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:7 Monday 07 July 2025 20:15:30 -0400 (0:00:00.112) 0:00:22.284 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:15 Monday 07 July 2025 20:15:30 -0400 (0:00:00.056) 0:00:22.341 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:135 Monday 07 July 2025 20:15:30 -0400 (0:00:00.055) 0:00:22.396 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:8 Monday 07 July 2025 20:15:30 -0400 (0:00:00.096) 0:00:22.492 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:16 Monday 07 July 2025 20:15:30 -0400 (0:00:00.041) 0:00:22.534 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:21 Monday 07 July 2025 20:15:30 -0400 (0:00:00.039) 0:00:22.573 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:27 Monday 07 July 2025 20:15:30 -0400 (0:00:00.037) 0:00:22.611 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:141 Monday 07 July 2025 20:15:30 -0400 (0:00:00.033) 0:00:22.644 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_firewall | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:148 Monday 07 July 2025 20:15:30 -0400 (0:00:00.031) 0:00:22.675 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:155 Monday 07 July 2025 20:15:30 -0400 (0:00:00.031) 0:00:22.707 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:159 Monday 07 
July 2025 20:15:30 -0400 (0:00:00.031) 0:00:22.738 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:168 Monday 07 July 2025 20:15:31 -0400 (0:00:00.031) 0:00:22.769 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:177 Monday 07 July 2025 20:15:31 -0400 (0:00:00.030) 0:00:22.800 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:184 Monday 07 July 2025 20:15:31 -0400 (0:00:00.051) 0:00:22.851 *********** skipping: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:191 Monday 07 July 2025 20:15:31 -0400 (0:00:00.053) 0:00:22.905 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node1 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Monday 07 July 2025 20:15:31 -0400 (0:00:00.185) 0:00:23.091 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Monday 07 July 2025 20:15:31 -0400 (0:00:00.060) 0:00:23.151 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Monday 07 July 2025 20:15:31 -0400 (0:00:00.048) 0:00:23.200 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Monday 07 July 2025 
20:15:31 -0400 (0:00:00.044) 0:00:23.244 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_name": "nopull", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Monday 07 July 2025 20:15:31 -0400 (0:00:00.062) 0:00:23.306 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:31 -0400 (0:00:00.062) 0:00:23.369 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:31 -0400 (0:00:00.036) 0:00:23.406 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Monday 07 July 2025 20:15:31 -0400 (0:00:00.036) 0:00:23.443 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Monday 07 July 2025 20:15:31 -0400 (0:00:00.045) 0:00:23.488 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751933454.6873221, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "8bedde2dbca15219e1a3b95a68a8c0d26a92ba62", "ctime": 1751933427.4892416, "dev": 51713, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 665568, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1748273472.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15496, "uid": 0, "version": "4278445899", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Monday 07 July 2025 20:15:32 -0400 (0:00:00.367) 0:00:23.855 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Monday 07 July 2025 20:15:32 -0400 (0:00:00.034) 0:00:23.890 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Monday 07 July 2025 20:15:32 -0400 (0:00:00.039) 0:00:23.929 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Monday 07 July 2025 20:15:32 -0400 (0:00:00.038) 0:00:23.968 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Monday 07 July 2025 20:15:32 -0400 (0:00:00.035) 0:00:24.003 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Monday 07 July 2025 20:15:32 -0400 (0:00:00.034) 0:00:24.038 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Monday 07 July 2025 20:15:32 -0400 (0:00:00.035) 0:00:24.073 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Monday 07 July 2025 20:15:32 -0400 (0:00:00.034) 0:00:24.107 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Monday 07 July 2025 20:15:32 -0400 (0:00:00.080) 0:00:24.188 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "nopull.service", "__podman_systemd_scope": "system", 
"__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Monday 07 July 2025 20:15:32 -0400 (0:00:00.056) 0:00:24.244 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Monday 07 July 2025 20:15:32 -0400 (0:00:00.038) 0:00:24.283 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:88 Monday 07 July 2025 20:15:32 -0400 (0:00:00.031) 0:00:24.314 *********** ok: [managed-node1] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/nopull.container", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:106 Monday 07 July 2025 20:15:32 -0400 (0:00:00.077) 0:00:24.391 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:113 Monday 07 July 2025 20:15:32 -0400 (0:00:00.039) 0:00:24.430 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Monday 07 July 2025 20:15:32 -0400 (0:00:00.077) 0:00:24.508 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Monday 07 July 2025 20:15:32 -0400 (0:00:00.032) 0:00:24.540 *********** ok: [managed-node1] => { "changed": false, "failed_when_result": false } MSG: Could not find the requested service nopull.service: host TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:34 Monday 07 July 2025 20:15:33 -0400 (0:00:00.698) 0:00:25.238 *********** ok: [managed-node1] => { "changed": false, "stat": { "atime": 1751933718.5601475, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": 
"670d64fc68a9768edb20cad26df2acc703542d85", "ctime": 1751933718.5641477, "dev": 51713, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 226492622, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1751933718.2091465, "nlink": 1, "path": "/etc/containers/systemd/nopull.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 151, "uid": 0, "version": "1911239089", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:39 Monday 07 July 2025 20:15:33 -0400 (0:00:00.370) 0:00:25.609 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Monday 07 July 2025 20:15:33 -0400 (0:00:00.057) 0:00:25.666 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Monday 07 July 2025 20:15:34 -0400 (0:00:00.429) 0:00:26.095 *********** fatal: [managed-node1]: FAILED! => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result" } TASK [Debug3] ****************************************************************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 Monday 07 July 2025 20:15:34 -0400 (0:00:00.040) 0:00:26.136 *********** fatal: [managed-node1]: FAILED! 
=> { "changed": false, "cmd": "set -x\nset -o pipefail\nexec 1>&2\n#podman volume rm --all\n#podman network prune -f\npodman volume ls\npodman network ls\npodman secret ls\npodman container ls\npodman pod ls\npodman images\nsystemctl list-units | grep quadlet\n", "delta": "0:00:00.179637", "end": "2025-07-07 20:15:34.863787", "rc": 1, "start": "2025-07-07 20:15:34.684150" } STDERR: + set -o pipefail + exec + podman volume ls DRIVER VOLUME NAME + podman network ls NETWORK ID NAME DRIVER 2f259bab93aa podman bridge f12c2088dbae podman-default-kube-network bridge + podman secret ls ID NAME DRIVER CREATED UPDATED + podman container ls CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + podman pod ls POD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS + podman images REPOSITORY TAG IMAGE ID CREATED SIZE quay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB localhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB quay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB + systemctl list-units + grep quadlet MSG: non-zero return code TASK [Cleanup user] ************************************************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:299 Monday 07 July 2025 20:15:34 -0400 (0:00:00.538) 0:00:26.675 *********** included: fedora.linux_system_roles.podman for managed-node1 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Monday 07 July 2025 20:15:35 -0400 (0:00:00.077) 0:00:26.752 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Monday 07 July 2025 20:15:35 -0400 (0:00:00.097) 0:00:26.850 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Monday 07 July 2025 20:15:35 -0400 (0:00:00.039) 0:00:26.889 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Monday 07 July 2025 20:15:35 -0400 (0:00:00.031) 0:00:26.920 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Monday 07 July 2025 20:15:35 -0400 (0:00:00.032) 0:00:26.953 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" 
} TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Monday 07 July 2025 20:15:35 -0400 (0:00:00.040) 0:00:26.993 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Monday 07 July 2025 20:15:35 -0400 (0:00:00.033) 0:00:27.027 *********** ok: [managed-node1] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node1] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS_9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node1] => (item=CentOS_9.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS_9.yml", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Monday 07 July 2025 20:15:35 -0400 (0:00:00.066) 0:00:27.093 *********** ok: [managed-node1] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Monday 07 July 2025 20:15:36 -0400 (0:00:00.800) 0:00:27.893 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Monday 07 July 2025 20:15:36 -0400 (0:00:00.031) 0:00:27.924 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages)) | list | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Monday 07 July 2025 20:15:36 -0400 (0:00:00.038) 0:00:27.963 *********** skipping: [managed-node1] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Monday 07 July 2025 20:15:36 -0400 
(0:00:00.037) 0:00:28.000 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Monday 07 July 2025 20:15:36 -0400 (0:00:00.035) 0:00:28.035 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Monday 07 July 2025 20:15:36 -0400 (0:00:00.033) 0:00:28.069 *********** ok: [managed-node1] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.026607", "end": "2025-07-07 20:15:36.645995", "rc": 0, "start": "2025-07-07 20:15:36.619388" } STDOUT: podman version 5.5.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Monday 07 July 2025 20:15:36 -0400 (0:00:00.385) 0:00:28.455 *********** ok: [managed-node1] => { "ansible_facts": { "podman_version": "5.5.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Monday 07 July 2025 20:15:36 -0400 (0:00:00.034) 0:00:28.490 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Monday 07 July 2025 20:15:36 -0400 (0:00:00.029) 0:00:28.520 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Monday 07 July 2025 20:15:36 -0400 (0:00:00.108) 0:00:28.628 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Monday 07 July 2025 20:15:36 -0400 (0:00:00.063) 0:00:28.692 *********** skipping: [managed-node1] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later 
for Pod quadlets] *** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Monday 07 July 2025 20:15:37 -0400 (0:00:00.061) 0:00:28.754 *********** META: end_host conditional evaluated to False, continuing execution for managed-node1 skipping: [managed-node1] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node1" } MSG: end_host conditional evaluated to false, continuing execution for managed-node1 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Monday 07 July 2025 20:15:37 -0400 (0:00:00.063) 0:00:28.818 *********** included: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node1 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Monday 07 July 2025 20:15:37 -0400 (0:00:00.064) 0:00:28.883 *********** ok: [managed-node1] => { "ansible_facts": { "getent_passwd": { "user_quadlet_basic": null } }, "changed": false } MSG: One or more supplied key could not be found in the database. TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Monday 07 July 2025 20:15:37 -0400 (0:00:00.370) 0:00:29.254 *********** fatal: [managed-node1]: FAILED! => { "changed": false } MSG: The given podman user [user_quadlet_basic] does not exist - cannot continue TASK [Dump journal] ************************************************************ task path: /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 Monday 07 July 2025 20:15:37 -0400 (0:00:00.042) 0:00:29.296 *********** fatal: [managed-node1]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.051019", "end": "2025-07-07 20:15:37.894301", "failed_when_result": true, "rc": 0, "start": "2025-07-07 20:15:37.843282" } STDOUT: Jul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.50636717 -0400 EDT m=+0.060702870 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:41 managed-node1 python3.9[33616]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container. 
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3725. Jul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.567765779 -0400 EDT m=+0.122101592 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.57173424 -0400 EDT m=+0.126069868 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:41 managed-node1 conmon[33633]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33633]: This container is intended for podman CI testing Jul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.621077845 -0400 EDT m=+0.032903868 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.635371985 -0400 EDT m=+0.047197781 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3730. Jul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.684616758 -0400 EDT m=+0.096442569 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.688361824 -0400 EDT m=+0.100187675 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33679]: This container is intended for podman CI testing Jul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.721345697 -0400 EDT m=+0.023159256 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.734216339 -0400 EDT m=+0.036029915 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3735. Jul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.785871287 -0400 EDT m=+0.087684859 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.788852193 -0400 EDT m=+0.090665898 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33694]: This container is intended for podman CI testing Jul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.824660189 -0400 EDT m=+0.023362688 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.837386611 -0400 EDT m=+0.036089125 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3740. Jul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.883258808 -0400 EDT m=+0.081961361 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.886125769 -0400 EDT m=+0.084828300 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33709]: This container is intended for podman CI testing Jul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.920368053 -0400 EDT m=+0.023513273 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.933238289 -0400 EDT m=+0.036383481 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3745. Jul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.981589962 -0400 EDT m=+0.084735325 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.986739296 -0400 EDT m=+0.089884524 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33724]: This container is intended for podman CI testing Jul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.021558667 -0400 EDT m=+0.024961424 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.034303085 -0400 EDT m=+0.037705836 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3750. Jul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.085120285 -0400 EDT m=+0.088523050 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.088183664 -0400 EDT m=+0.091586459 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33739]: This container is intended for podman CI testing Jul 07 20:12:42 managed-node1 conmon[33739]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.12308754 -0400 EDT m=+0.023361800 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.135582446 -0400 EDT m=+0.035856641 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3755. Jul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.187408375 -0400 EDT m=+0.087682662 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.191561529 -0400 EDT m=+0.091835831 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33754]: This container is intended for podman CI testing Jul 07 20:12:42 managed-node1 conmon[33754]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.246794704 -0400 EDT m=+0.035170840 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.263541922 -0400 EDT m=+0.051917974 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3760. Jul 07 20:12:42 managed-node1 rsyslogd[809]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ] Jul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.319658138 -0400 EDT m=+0.108034286 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33828]: This container is intended for podman CI testing Jul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.337549515 -0400 EDT m=+0.125925516 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.381151412 -0400 EDT m=+0.028864286 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.397512131 -0400 EDT m=+0.045224977 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3765. Jul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.458716015 -0400 EDT m=+0.106428982 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33910]: This container is intended for podman CI testing Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 conmon[33910]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.464861994 -0400 EDT m=+0.112574931 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.518544455 -0400 EDT m=+0.037340984 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.533005797 -0400 EDT m=+0.051802239 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 python3.9[33908]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3770.
Jul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.597515745 -0400 EDT m=+0.116312289 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.601910611 -0400 EDT m=+0.120706991 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33925]: This container is intended for podman CI testing
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.649266086 -0400 EDT m=+0.035775114 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.662040499 -0400 EDT m=+0.048549443 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3775.
Jul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.704656431 -0400 EDT m=+0.091165403 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.708298645 -0400 EDT m=+0.094807598 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33966]: This container is intended for podman CI testing
Jul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.740044829 -0400 EDT m=+0.022495809 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.752990822 -0400 EDT m=+0.035441768 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3780.
Jul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.800279156 -0400 EDT m=+0.082730097 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33981]: This container is intended for podman CI testing
Jul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.803693605 -0400 EDT m=+0.086144594 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 conmon[33981]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.838450335 -0400 EDT m=+0.022735162 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.851144871 -0400 EDT m=+0.035429657 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3785.
Jul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.906726377 -0400 EDT m=+0.091011163 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.909767637 -0400 EDT m=+0.094052553 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33996]: This container is intended for podman CI testing
Jul 07 20:12:42 managed-node1 conmon[33996]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:42 managed-node1 podman[34000]: 2025-07-07 20:12:42.94603736 -0400 EDT m=+0.022389547 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:42 managed-node1 podman[34000]: 2025-07-07 20:12:42.95838577 -0400 EDT m=+0.034737914 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3790.
Jul 07 20:12:43 managed-node1 podman[34000]: 2025-07-07 20:12:43.010324461 -0400 EDT m=+0.086676634 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34000]: 2025-07-07 20:12:43.013287346 -0400 EDT m=+0.089639510 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34011]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34011]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.048792098 -0400 EDT m=+0.022901709 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.061583347 -0400 EDT m=+0.035692962 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3795.
Jul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.109393741 -0400 EDT m=+0.083503343 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.112325979 -0400 EDT m=+0.086435576 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34027]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 conmon[34027]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.147824494 -0400 EDT m=+0.022685823 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.160367485 -0400 EDT m=+0.035228812 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3800.
Jul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.208261872 -0400 EDT m=+0.083123369 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.211258219 -0400 EDT m=+0.086119570 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34042]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34042]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.246832569 -0400 EDT m=+0.022325446 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.259383759 -0400 EDT m=+0.034876604 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3805.
Jul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.305492865 -0400 EDT m=+0.080985790 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.308387804 -0400 EDT m=+0.083880721 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34057]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34057]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.34873328 -0400 EDT m=+0.027342276 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.363474331 -0400 EDT m=+0.042083270 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3810.
Jul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.410621492 -0400 EDT m=+0.089230429 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.413616835 -0400 EDT m=+0.092225843 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34073]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34073]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.448247251 -0400 EDT m=+0.020927733 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.461523867 -0400 EDT m=+0.034204271 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3815.
Jul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.505693928 -0400 EDT m=+0.078374360 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.50945538 -0400 EDT m=+0.082135854 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34089]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34089]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.542568623 -0400 EDT m=+0.023392139 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.554898291 -0400 EDT m=+0.035721832 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3820.
Jul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.604504653 -0400 EDT m=+0.085328217 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.608980924 -0400 EDT m=+0.089804434 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34104]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 conmon[34104]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.643575359 -0400 EDT m=+0.021129814 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.656332565 -0400 EDT m=+0.033887028 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3825.
Jul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.69945226 -0400 EDT m=+0.077006794 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.702989717 -0400 EDT m=+0.080544207 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34119]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.737060945 -0400 EDT m=+0.024227188 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.750183557 -0400 EDT m=+0.037349790 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3830.
Jul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.79819496 -0400 EDT m=+0.085361237 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.801180924 -0400 EDT m=+0.088347172 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34134]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 conmon[34134]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.835320547 -0400 EDT m=+0.021567597 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.847704391 -0400 EDT m=+0.033951418 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3835.
Jul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.894082011 -0400 EDT m=+0.080329100 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.897088852 -0400 EDT m=+0.083336170 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34149]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 conmon[34149]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.933651016 -0400 EDT m=+0.022365107 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.946066969 -0400 EDT m=+0.034781019 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3840.
Jul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.994562003 -0400 EDT m=+0.083276126 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.997511538 -0400 EDT m=+0.086225656 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34165]: This container is intended for podman CI testing
Jul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.031715352 -0400 EDT m=+0.024332851 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.044791422 -0400 EDT m=+0.037408902 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3845.
Jul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.093420708 -0400 EDT m=+0.086038408 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34180]: This container is intended for podman CI testing
Jul 07 20:12:44 managed-node1 conmon[34180]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.099571715 -0400 EDT m=+0.092189277 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.130879463 -0400 EDT m=+0.022875842 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.143583403 -0400 EDT m=+0.035579729 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)
Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3850.
Jul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.192949135 -0400 EDT m=+0.084945537 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.196037702 -0400 EDT m=+0.088034089 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service)
Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34195]: This container is intended for podman CI testing
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 conmon[34195]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.232452577 -0400 EDT m=+0.023557794 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.245164617 -0400 EDT m=+0.036269711 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.
░░
░░ The job identifier is 3855.
Jul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.289637994 -0400 EDT m=+0.080743233 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)
Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.
░░ Subject: Unit succeeded
░░ Defined-By: systemd
░░ Support: https://access.redhat.com/support
░░
░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34211]: This container is intended for podman CI testing
Jul 07 20:12:44 managed-node1 conmon[34211]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events
Jul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.296236962 -0400 EDT m=+0.087342134 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)
Jul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.334423585 -0400 EDT m=+0.026223637 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)
Jul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.34696504 -0400 EDT m=+0.038765223 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44
managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3860. Jul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.394138504 -0400 EDT m=+0.085938568 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.398059266 -0400 EDT m=+0.089859394 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34227]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.433541456 -0400 EDT m=+0.023710477 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.446525508 -0400 EDT m=+0.036694502 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3865. Jul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.499327904 -0400 EDT m=+0.089496946 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.502313318 -0400 EDT m=+0.092482358 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34242]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 conmon[34242]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.538737175 -0400 EDT m=+0.023233861 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.551520023 -0400 EDT m=+0.036016714 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3870. Jul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.603486031 -0400 EDT m=+0.087982819 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.606474049 -0400 EDT m=+0.090970895 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34257]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.642852207 -0400 EDT m=+0.021419460 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.655259328 -0400 EDT m=+0.033826523 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3875. Jul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.704108726 -0400 EDT m=+0.082675932 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.707014409 -0400 EDT m=+0.085581642 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34273]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 conmon[34273]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.741456944 -0400 EDT m=+0.020475966 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.754038707 -0400 EDT m=+0.033057743 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3880. Jul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.798618058 -0400 EDT m=+0.077637084 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.801551057 -0400 EDT m=+0.080570258 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34288]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.835424947 -0400 EDT m=+0.021183939 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.848104715 -0400 EDT m=+0.033863640 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3885. Jul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.89843642 -0400 EDT m=+0.084195352 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.901416892 -0400 EDT m=+0.087175919 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34303]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:44 managed-node1 conmon[34303]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.93488204 -0400 EDT m=+0.021084894 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.948114757 -0400 EDT m=+0.034317523 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3890. Jul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.99428785 -0400 EDT m=+0.080490660 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34318]: This container is intended for podman CI testing Jul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.997964153 -0400 EDT m=+0.084166944 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.030961646 -0400 EDT m=+0.021565433 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.047502169 -0400 EDT m=+0.038105908 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3895. Jul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.103724388 -0400 EDT m=+0.094328080 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.106984259 -0400 EDT m=+0.097587984 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34364]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 conmon[34364]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.157943882 -0400 EDT m=+0.030560165 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.170688107 -0400 EDT m=+0.043304256 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3900. 
Jul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.234124933 -0400 EDT m=+0.106741355 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.238490952 -0400 EDT m=+0.111107125 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34447]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:45 managed-node1 conmon[34447]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.288662798 -0400 EDT m=+0.034016222 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.301874445 -0400 EDT m=+0.047228008 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3905. Jul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.381555979 -0400 EDT m=+0.126909625 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34488]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.38672817 -0400 EDT m=+0.132081609 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 python3.9[34476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.438723941 -0400 EDT m=+0.040591696 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.453284139 -0400 EDT m=+0.055151863 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3910. Jul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.505040801 -0400 EDT m=+0.106908624 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.508106515 -0400 EDT m=+0.109974334 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34530]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:45 managed-node1 conmon[34530]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.551903395 -0400 EDT m=+0.029054384 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.565026326 -0400 EDT m=+0.042177295 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3915. Jul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.612904168 -0400 EDT m=+0.090055136 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34546]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.616986931 -0400 EDT m=+0.094138023 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.65020363 -0400 EDT m=+0.022500482 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.662323598 -0400 EDT m=+0.034620330 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3920. 
Jul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.707102849 -0400 EDT m=+0.079399643 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.710030304 -0400 EDT m=+0.082327099 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34561]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.741502106 -0400 EDT m=+0.022128096 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.754021345 -0400 EDT m=+0.034647277 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3925. 
Jul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.802124996 -0400 EDT m=+0.082750990 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.805080684 -0400 EDT m=+0.085706677 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34576]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 conmon[34576]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.839082255 -0400 EDT m=+0.020790370 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.851852538 -0400 EDT m=+0.033560581 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3930. Jul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.898524256 -0400 EDT m=+0.080232378 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.901532521 -0400 EDT m=+0.083240667 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34591]: This container is intended for podman CI testing Jul 07 20:12:45 managed-node1 conmon[34591]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:45 managed-node1 podman[34595]: 2025-07-07 20:12:45.939231145 -0400 EDT m=+0.023588832 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:45 managed-node1 podman[34595]: 2025-07-07 20:12:45.952028898 -0400 EDT m=+0.036386479 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3935. Jul 07 20:12:46 managed-node1 podman[34595]: 2025-07-07 20:12:46.000385348 -0400 EDT m=+0.084742969 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:46 managed-node1 podman[34595]: 2025-07-07 20:12:46.003323704 -0400 EDT m=+0.087681325 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34606]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.038256205 -0400 EDT m=+0.023133829 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.050563115 -0400 EDT m=+0.035440605 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3940. Jul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.097984316 -0400 EDT m=+0.082862186 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.101388337 -0400 EDT m=+0.086265909 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34621]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 conmon[34621]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.13377981 -0400 EDT m=+0.022146824 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.146317225 -0400 EDT m=+0.034684206 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3945. Jul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.188421525 -0400 EDT m=+0.076788579 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.191434273 -0400 EDT m=+0.079801340 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34636]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 conmon[34636]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.223847962 -0400 EDT m=+0.022003559 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.236340875 -0400 EDT m=+0.034496479 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3950. 
Jul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.284186503 -0400 EDT m=+0.082342176 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.28721142 -0400 EDT m=+0.085367074 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34651]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 conmon[34651]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.324290164 -0400 EDT m=+0.022430941 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.337132899 -0400 EDT m=+0.035273639 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3955. Jul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.381677742 -0400 EDT m=+0.079818544 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.384681206 -0400 EDT m=+0.082821978 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34666]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.423094224 -0400 EDT m=+0.029116182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.436067954 -0400 EDT m=+0.042089938 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. 
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 3960. Jul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.483206874 -0400 EDT m=+0.089228758 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.48613867 -0400 EDT m=+0.092160608 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34683]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 conmon[34683]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.520804338 -0400 EDT m=+0.021717877 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.533304539 -0400 EDT m=+0.034218030 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3965. Jul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.585096331 -0400 EDT m=+0.086009885 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.588088229 -0400 EDT m=+0.089001762 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34698]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.624273128 -0400 EDT m=+0.022844259 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.636433128 -0400 EDT m=+0.035004218 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3970. Jul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.688355591 -0400 EDT m=+0.086926731 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34713]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.691731915 -0400 EDT m=+0.090303278 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:46 managed-node1 conmon[34713]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.726479493 -0400 EDT m=+0.022453022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.738600586 -0400 EDT m=+0.034574136 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3975. Jul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.789135405 -0400 EDT m=+0.085109004 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.793455608 -0400 EDT m=+0.089429341 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34728]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 conmon[34728]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.828415822 -0400 EDT m=+0.022161399 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.840867235 -0400 EDT m=+0.034612783 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3980. Jul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.897505231 -0400 EDT m=+0.091250903 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34761]: This container is intended for podman CI testing Jul 07 20:12:46 managed-node1 conmon[34761]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.90395772 -0400 EDT m=+0.097703287 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:46 managed-node1 podman[34776]: 2025-07-07 20:12:46.948881564 -0400 EDT m=+0.030238604 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:46 managed-node1 podman[34776]: 2025-07-07 20:12:46.962711084 -0400 EDT m=+0.044068090 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3985. Jul 07 20:12:47 managed-node1 podman[34776]: 2025-07-07 20:12:47.024077481 -0400 EDT m=+0.105434639 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34834]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 conmon[34834]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[34776]: 2025-07-07 20:12:47.043812635 -0400 EDT m=+0.125169644 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.0939691 -0400 EDT m=+0.037738572 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.108160326 -0400 EDT m=+0.051929759 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 python3.9[34893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3990. 
Jul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.242082057 -0400 EDT m=+0.185851724 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34900]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.247567844 -0400 EDT m=+0.191337205 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.298810756 -0400 EDT m=+0.036371284 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.311789612 -0400 EDT m=+0.049350163 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 3995. 
Jul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.358183364 -0400 EDT m=+0.095743929 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.361134485 -0400 EDT m=+0.098695044 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34941]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.395146285 -0400 EDT m=+0.022822779 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.407443176 -0400 EDT m=+0.035119577 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4000. 
Jul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.453148133 -0400 EDT m=+0.080824579 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.456172469 -0400 EDT m=+0.083848916 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34956]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 conmon[34956]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.496013156 -0400 EDT m=+0.030390600 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.508423642 -0400 EDT m=+0.042801057 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. 
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 4005. Jul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.560213565 -0400 EDT m=+0.094591028 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.563206119 -0400 EDT m=+0.097583597 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34971]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:47 managed-node1 conmon[34971]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.596412334 -0400 EDT m=+0.022899092 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.60891684 -0400 EDT m=+0.035403593 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4010. Jul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.657587558 -0400 EDT m=+0.084074365 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.660524503 -0400 EDT m=+0.087011410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34987]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 conmon[34987]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.695593786 -0400 EDT m=+0.022541319 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.708183773 -0400 EDT m=+0.035131258 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4015. Jul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.750396628 -0400 EDT m=+0.077344189 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.7538329 -0400 EDT m=+0.080780443 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35002]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.789148197 -0400 EDT m=+0.022104143 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.801861569 -0400 EDT m=+0.034817418 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4020. Jul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.849787507 -0400 EDT m=+0.082743391 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.852788948 -0400 EDT m=+0.085745045 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35017]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:47 managed-node1 conmon[35017]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.886433326 -0400 EDT m=+0.023333664 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.898635068 -0400 EDT m=+0.035535395 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4025. Jul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.94781473 -0400 EDT m=+0.084715070 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.950786701 -0400 EDT m=+0.087687110 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35032]: This container is intended for podman CI testing Jul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:47 managed-node1 conmon[35032]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:47 managed-node1 podman[35036]: 2025-07-07 20:12:47.985090075 -0400 EDT m=+0.020824461 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:47 managed-node1 podman[35036]: 2025-07-07 20:12:47.99765414 -0400 EDT m=+0.033388507 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4030. 
Jul 07 20:12:48 managed-node1 podman[35036]: 2025-07-07 20:12:48.045157855 -0400 EDT m=+0.080892244 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 podman[35036]: 2025-07-07 20:12:48.048132676 -0400 EDT m=+0.083867091 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35048]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:48 managed-node1 conmon[35048]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.082018163 -0400 EDT m=+0.023940931 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.094295104 -0400 EDT m=+0.036217821 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4035. Jul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.137986786 -0400 EDT m=+0.079909620 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.14089375 -0400 EDT m=+0.082816485 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35063]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:48 managed-node1 conmon[35063]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.174678497 -0400 EDT m=+0.023706278 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.18747958 -0400 EDT m=+0.036507308 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4040. Jul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.233346802 -0400 EDT m=+0.082374550 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35079]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.236827618 -0400 EDT m=+0.085855456 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:48 managed-node1 conmon[35079]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.274803973 -0400 EDT m=+0.023869804 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.287274677 -0400 EDT m=+0.036340490 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4045. 
Jul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.333271188 -0400 EDT m=+0.082337067 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.337315563 -0400 EDT m=+0.086381467 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35094]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:48 managed-node1 conmon[35094]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.367811267 -0400 EDT m=+0.020607548 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.380577814 -0400 EDT m=+0.033374070 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4050. Jul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.425671708 -0400 EDT m=+0.078467975 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.429157068 -0400 EDT m=+0.081953417 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35110]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:48 managed-node1 conmon[35110]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.462682903 -0400 EDT m=+0.023415424 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.475209185 -0400 EDT m=+0.035941652 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4055. Jul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.528134339 -0400 EDT m=+0.088866851 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35125]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.532354356 -0400 EDT m=+0.093086760 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.564658555 -0400 EDT m=+0.021999487 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.578013664 -0400 EDT m=+0.035354664 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4060. 
Jul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.635246136 -0400 EDT m=+0.092587077 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.638726936 -0400 EDT m=+0.096067987 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35140]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 conmon[35140]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.674083012 -0400 EDT m=+0.022192448 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.68682521 -0400 EDT m=+0.034934549 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4065. Jul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.728260743 -0400 EDT m=+0.076370138 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.731194428 -0400 EDT m=+0.079304101 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35155]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 conmon[35155]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.762366829 -0400 EDT m=+0.020915245 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.775477215 -0400 EDT m=+0.034025616 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4070. Jul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.820672045 -0400 EDT m=+0.079220476 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35170]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 conmon[35170]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.826968782 -0400 EDT m=+0.085517199 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.859644209 -0400 EDT m=+0.023867878 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.873025178 -0400 EDT m=+0.037249131 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4075. 
Jul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.930458545 -0400 EDT m=+0.094682286 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.933368693 -0400 EDT m=+0.097592453 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35197]: This container is intended for podman CI testing Jul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:48 managed-node1 podman[35213]: 2025-07-07 20:12:48.980696341 -0400 EDT m=+0.028846487 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:48 managed-node1 podman[35213]: 2025-07-07 20:12:48.994689581 -0400 EDT m=+0.042839889 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4080. 
Jul 07 20:12:49 managed-node1 podman[35213]: 2025-07-07 20:12:49.051661925 -0400 EDT m=+0.099812226 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35275]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 conmon[35275]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:49 managed-node1 podman[35213]: 2025-07-07 20:12:49.057246727 -0400 EDT m=+0.105397028 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.100010948 -0400 EDT m=+0.028683846 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.11603344 -0400 EDT m=+0.044706047 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4085. Jul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.17786511 -0400 EDT m=+0.106537682 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35340]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 conmon[35340]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.184863676 -0400 EDT m=+0.113536155 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.245543717 -0400 EDT m=+0.045029030 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.259415611 -0400 EDT m=+0.058900865 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 
managed-node1 python3.9[35335]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4090. Jul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.320731505 -0400 EDT m=+0.120216683 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.326415099 -0400 EDT m=+0.125900275 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35357]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 conmon[35357]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.372946551 -0400 EDT m=+0.028761302 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.385464131 -0400 EDT m=+0.041278816 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4095. Jul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.42850947 -0400 EDT m=+0.084324163 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.432127873 -0400 EDT m=+0.087942624 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35396]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:49 managed-node1 conmon[35396]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.468834038 -0400 EDT m=+0.022128269 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.481687819 -0400 EDT m=+0.034982048 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4100. Jul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.528341097 -0400 EDT m=+0.081635334 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.532328195 -0400 EDT m=+0.085622512 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35411]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.564408437 -0400 EDT m=+0.022548782 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.577082086 -0400 EDT m=+0.035222377 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4105. Jul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.626428781 -0400 EDT m=+0.084569062 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.629507264 -0400 EDT m=+0.087647538 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35427]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.663957045 -0400 EDT m=+0.021447599 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.676778631 -0400 EDT m=+0.034269117 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4110. Jul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.724417963 -0400 EDT m=+0.081908513 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35443]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.728334134 -0400 EDT m=+0.085824587 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.763470561 -0400 EDT m=+0.022453377 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.776036134 -0400 EDT m=+0.035018902 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4115. Jul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.826175596 -0400 EDT m=+0.085158416 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.829136491 -0400 EDT m=+0.088119344 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35458]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 conmon[35458]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.860688957 -0400 EDT m=+0.022137746 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.873165971 -0400 EDT m=+0.034614682 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 4120.
Jul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.916149214 -0400 EDT m=+0.077597920 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.919118383 -0400 EDT m=+0.080567116 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35473]: This container is intended for podman CI testing Jul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:49 managed-node1 podman[35477]: 2025-07-07 20:12:49.955542698 -0400 EDT m=+0.022514779 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:49 managed-node1 podman[35477]: 2025-07-07 20:12:49.968269198 -0400 EDT m=+0.035241281 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4125. 
Jul 07 20:12:50 managed-node1 podman[35477]: 2025-07-07 20:12:50.014342683 -0400 EDT m=+0.081314820 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 podman[35477]: 2025-07-07 20:12:50.01878655 -0400 EDT m=+0.085758758 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35488]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.053363645 -0400 EDT m=+0.024574745 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.066171766 -0400 EDT m=+0.037383115 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4130. 
Jul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.115050823 -0400 EDT m=+0.086262000 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.118910096 -0400 EDT m=+0.090121505 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35503]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 conmon[35503]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.153822119 -0400 EDT m=+0.021646500 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.166416474 -0400 EDT m=+0.034240783 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4135. Jul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.214244614 -0400 EDT m=+0.082068966 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.217195367 -0400 EDT m=+0.085019732 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35519]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.248680627 -0400 EDT m=+0.020233109 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.261496823 -0400 EDT m=+0.033049364 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4140. Jul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.309309961 -0400 EDT m=+0.080862551 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.312592255 -0400 EDT m=+0.084144814 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35534]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.361351804 -0400 EDT m=+0.036190231 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.374459829 -0400 EDT m=+0.049298407 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. 
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 4145. Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.422744257 -0400 EDT m=+0.097582608 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35595]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.42588667 -0400 EDT m=+0.100725056 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:50 managed-node1 conmon[35595]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.477186688 -0400 EDT m=+0.035333022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.491409753 -0400 EDT m=+0.049556363 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4150. Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.552263881 -0400 EDT m=+0.110410323 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. 
Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35663]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 conmon[35663]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.557866714 -0400 EDT m=+0.116012971 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.620063202 -0400 EDT m=+0.048519999 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.633249443 -0400 EDT m=+0.061706156 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4155. Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.692351489 -0400 EDT m=+0.120808216 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35704]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.69827612 -0400 EDT m=+0.126732782 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 python3.9[35698]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.759561182 -0400 EDT m=+0.046849024 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.77735646 -0400 EDT m=+0.064644058 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. ░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 4160.
Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.833194605 -0400 EDT m=+0.120482053 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.836115952 -0400 EDT m=+0.123403410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35741]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.869261632 -0400 EDT m=+0.023764777 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.881551249 -0400 EDT m=+0.036054336 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4165. 
Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.928402597 -0400 EDT m=+0.082905709 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.931314876 -0400 EDT m=+0.085818011 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35759]: This container is intended for podman CI testing Jul 07 20:12:50 managed-node1 conmon[35759]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.96332082 -0400 EDT m=+0.021467626 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.976815265 -0400 EDT m=+0.034961991 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container. 
â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4170. Jul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.044375911 -0400 EDT m=+0.102522824 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35798]: This container is intended for podman CI testing Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.049348235 -0400 EDT m=+0.107495274 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.092437769 -0400 EDT m=+0.029222529 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.107275694 -0400 EDT m=+0.044060322 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z) Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container. 
░░ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. ░░ ░░ The job identifier is 4175. Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.15929211 -0400 EDT m=+0.096076800 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.163527993 -0400 EDT m=+0.100312554 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35867]: This container is intended for podman CI testing Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.
Jul 07 20:12:51 managed-node1 conmon[35867]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.225315735 -0400 EDT m=+0.048836182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.239386226 -0400 EDT m=+0.062906669 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:51 managed-node1 python3.9[35925]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4180. Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.466820809 -0400 EDT m=+0.290341396 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.470785368 -0400 EDT m=+0.294305786 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing Jul 07 20:12:51 managed-node1 conmon[35931]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events Jul 07 20:12:51 managed-node1 systemd[1]: Reloading. Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.552775836 -0400 EDT m=+0.056358134 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0) Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.570347566 -0400 EDT m=+0.073929520 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test) Jul 07 20:12:51 managed-node1 systemd-rc-local-generator[35966]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:51 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play... â–‘â–‘ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution. â–‘â–‘ â–‘â–‘ The job identifier is 4185. Jul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container. â–‘â–‘ Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4186. 
Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.780283168 -0400 EDT m=+0.283865102 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35985]: This container is intended for podman CI testing Jul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state. Jul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.794156305 -0400 EDT m=+0.297738289 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.805487564 -0400 EDT m=+0.042136278 pod stop 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube) Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.807535435 -0400 EDT m=+0.044184379 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry) Jul 07 20:12:51 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount has successfully entered the 'dead' state. 
Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.856254498 -0400 EDT m=+0.092903180 container cleanup b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage) Jul 07 20:12:51 managed-node1 systemd[1]: libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope has successfully entered the 'dead' state. Jul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.866791467 -0400 EDT m=+0.103440452 container died feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:51 managed-node1 systemd[1]: run-r8208ec8fcd4d4202907a4db922529a4b.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit run-r8208ec8fcd4d4202907a4db922529a4b.scope has successfully entered the 'dead' state. Jul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left allmulticast mode Jul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left promiscuous mode Jul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state Jul 07 20:12:51 managed-node1 NetworkManager[642]: [1751933571.9144] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:12:51 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has begun execution. â–‘â–‘ â–‘â–‘ The job identifier is 4192. Jul 07 20:12:51 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. â–‘â–‘ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A start job for unit NetworkManager-dispatcher.service has finished successfully. â–‘â–‘ â–‘â–‘ The job identifier is 4192. Jul 07 20:12:52 managed-node1 systemd[1]: run-netns-netns\x2d2411dc39\x2d3430\x2d1e50\x2dd025\x2d1a201717aa6d.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit run-netns-netns\x2d2411dc39\x2d3430\x2d1e50\x2dd025\x2d1a201717aa6d.mount has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.071289258 -0400 EDT m=+0.307937965 container cleanup feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice. â–‘â–‘ Subject: A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished. â–‘â–‘ â–‘â–‘ The job identifier is 4258 and the job result is done. Jul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Consumed 1.746s CPU time. â–‘â–‘ Subject: Resources consumed by unit runtime â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice completed and consumed the indicated resources. 
Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.098281515 -0400 EDT m=+0.334930231 container remove b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.124590961 -0400 EDT m=+0.361239677 container remove feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: No such file or directory Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.134201698 -0400 EDT m=+0.370850385 pod remove 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube) Jul 07 20:12:52 managed-node1 podman[35983]: Pods stopped: Jul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 Jul 07 20:12:52 managed-node1 podman[35983]: Pods removed: Jul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 Jul 07 20:12:52 managed-node1 podman[35983]: Secrets removed: Jul 07 20:12:52 managed-node1 podman[35983]: Volumes removed: Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.138635694 -0400 EDT m=+0.375284640 container kill 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 systemd[1]: libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 conmon[20791]: conmon 782aa8ef72d65d91e61b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope/container/memory.events Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.145356852 -0400 EDT m=+0.382005666 container died 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount: Deactivated successfully. 
â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.214468795 -0400 EDT m=+0.451117504 container remove 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service) Jul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state. Jul 07 20:12:52 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play. â–‘â–‘ Subject: A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ A stop job for unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service has finished. â–‘â–‘ â–‘â–‘ The job identifier is 4185 and the job result is done. Jul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 36.014s CPU time. â–‘â–‘ Subject: Resources consumed by unit runtime â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources. Jul 07 20:12:52 managed-node1 python3.9[36210]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit var-lib-containers-storage-overlay\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount has successfully entered the 'dead' state. 
Jul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml Jul 07 20:12:53 managed-node1 python3.9[36523]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:12:54 managed-node1 python3.9[36672]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:56 managed-node1 python3.9[36823]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:12:56 managed-node1 systemd[1]: Reloading. Jul 07 20:12:56 managed-node1 systemd-rc-local-generator[36842]: /etc/rc.d/rc.local is not marked executable, skipping. Jul 07 20:12:57 managed-node1 python3.9[37008]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:12:57 managed-node1 python3.9[37308]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:12:59 managed-node1 python3.9[37607]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:00 managed-node1 python3.9[37762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:01 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. â–‘â–‘ Subject: Unit succeeded â–‘â–‘ Defined-By: systemd â–‘â–‘ Support: https://access.redhat.com/support â–‘â–‘ â–‘â–‘ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
Jul 07 20:13:03 managed-node1 python3.9[37913]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:05 managed-node1 python3.9[38064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:06 managed-node1 python3.9[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:07 managed-node1 python3.9[38366]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:08 managed-node1 python3.9[38516]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jul 07 20:13:08 managed-node1 python3.9[38667]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:09 managed-node1 python3.9[38816]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:10 managed-node1 python3.9[38965]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:12 managed-node1 python3.9[39116]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:13:12 managed-node1 systemd[1]: Reloading. Jul 07 20:13:12 managed-node1 systemd-rc-local-generator[39137]: /etc/rc.d/rc.local is not marked executable, skipping. 
Jul 07 20:13:12 managed-node1 python3.9[39301]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:13 managed-node1 python3.9[39601]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:15 managed-node1 python3.9[39900]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:16 managed-node1 python3.9[40054]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None Jul 07 20:13:17 managed-node1 python3.9[40204]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:17 managed-node1 python3.9[40354]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:13:18 managed-node1 python3.9[40504]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:18 managed-node1 python3.9[40653]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type "volume"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:19 managed-node1 python3.9[40810]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:19 managed-node1 systemd[1]: libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.265873387 -0400 EDT m=+0.045935077 container died 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left allmulticast mode Jul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left promiscuous mode Jul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state Jul 07 20:13:19 managed-node1 NetworkManager[642]: [1751933599.3183] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jul 07 20:13:19 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 4260. Jul 07 20:13:19 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 4260. Jul 07 20:13:19 managed-node1 systemd[1]: run-netns-netns\x2da357660f\x2d51e9\x2def21\x2deb3c\x2d7f281ab8e18b.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2da357660f\x2d51e9\x2def21\x2deb3c\x2d7f281ab8e18b.mount has successfully entered the 'dead' state. Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount has successfully entered the 'dead' state. Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount has successfully entered the 'dead' state. Jul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.49814603 -0400 EDT m=+0.278207625 container remove 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry) Jul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.
Jul 07 20:13:19 managed-node1 systemd[1]: libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state. Jul 07 20:13:19 managed-node1 python3.9[41023]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:19 managed-node1 podman[41024]: 2025-07-07 20:13:19.910047174 -0400 EDT m=+0.025683882 volume remove 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1 Jul 07 20:13:20 managed-node1 python3.9[41180]: ansible-file Invoked with path=/tmp/lsr_g7zmh6pf_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:22 managed-node1 python3.9[41378]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:13:23 managed-node1 python3.9[41553]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:23 managed-node1 python3.9[41702]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:25 managed-node1 python3.9[42000]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:26 managed-node1 python3.9[42155]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:13:26 managed-node1 python3.9[42305]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:28 managed-node1 python3.9[42456]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None Jul 07 20:13:28 managed-node1 python3.9[42605]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_630o0ml1/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:29 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.
Jul 07 20:13:29 managed-node1 python3.9[42755]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jul 07 20:13:29 managed-node1 useradd[42757]: new group: name=user1, GID=1000 Jul 07 20:13:29 managed-node1 useradd[42757]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0 Jul 07 20:13:31 managed-node1 python3.9[43061]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:32 managed-node1 python3.9[43217]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None Jul 07 20:13:33 managed-node1 python3.9[43367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:33 managed-node1 python3.9[43518]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:33 managed-node1 python3.9[43668]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:34 managed-node1 python3.9[43818]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:35 managed-node1 python3.9[43967]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:35 managed-node1 python3.9[44087]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933614.9410055-13310-24639621612646/.source.conf _original_basename=.lefyhyea follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:36 managed-node1 python3.9[44236]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d 
state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:36 managed-node1 python3.9[44385]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:36 managed-node1 python3.9[44505]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933616.3031077-13365-95635820072900/.source.conf _original_basename=.79pds_qw follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:37 managed-node1 python3.9[44654]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:37 managed-node1 python3.9[44803]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:38 managed-node1 python3.9[44923]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933617.4859679-13409-167338433742499/.source.conf _original_basename=.5__9m3z1 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:38 managed-node1 python3.9[45072]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:38 managed-node1 python3.9[45221]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:39 managed-node1 python3.9[45370]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:39 managed-node1 python3.9[45490]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 
src=/root/.ansible/tmp/ansible-tmp-1751933619.023253-13468-75507327963883/.source.json _original_basename=.gsbubo1d follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:40 managed-node1 python3.9[45639]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:40 managed-node1 python3.9[45790]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:41 managed-node1 python3.9[45940]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:41 managed-node1 python3.9[46090]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:43 managed-node1 python3.9[46508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:43 managed-node1 python3.9[46659]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:44 managed-node1 python3.9[46809]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:45 managed-node1 python3.9[46959]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:45 managed-node1 python3.9[47110]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:45 managed-node1 python3.9[47261]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:46 managed-node1 python3.9[47412]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:46 managed-node1 python3.9[47563]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True 
argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:48 managed-node1 python3.9[47862]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:49 managed-node1 python3.9[48017]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:49 managed-node1 python3.9[48168]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:49 managed-node1 python3.9[48318]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:50 managed-node1 python3.9[48468]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:51 managed-node1 python3.9[48617]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:51 managed-node1 python3.9[48692]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.yuv1oz8a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:52 managed-node1 python3.9[48841]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:52 managed-node1 python3.9[48990]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:52 managed-node1 python3.9[49065]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.t7udz4o_ recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S 
access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:53 managed-node1 python3.9[49214]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:53 managed-node1 python3.9[49363]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:13:53 managed-node1 python3.9[49438]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.acx818hv recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:54 managed-node1 python3.9[49587]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:54 managed-node1 python3.9[49736]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:55 managed-node1 python3.9[49887]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json Jul 07 20:13:56 managed-node1 python3.9[50036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:56 managed-node1 python3.9[50187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:56 managed-node1 python3.9[50337]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:13:57 managed-node1 python3.9[50487]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:13:59 managed-node1 python3.9[50860]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True 
get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:13:59 managed-node1 python3.9[51011]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:00 managed-node1 python3.9[51161]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:01 managed-node1 python3.9[51311]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:01 managed-node1 python3.9[51462]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:01 managed-node1 python3.9[51613]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:02 managed-node1 python3.9[51764]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:03 managed-node1 python3.9[52064]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:04 managed-node1 python3.9[52219]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:05 managed-node1 python3.9[52369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:05 managed-node1 python3.9[52520]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:06 managed-node1 python3.9[52669]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:06 managed-node1 python3.9[52789]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933646.0899522-14266-164202167319133/.source.conf _original_basename=.f55n9smu follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:07 managed-node1 python3.9[52938]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 
recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:07 managed-node1 python3.9[53087]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:07 managed-node1 python3.9[53207]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933647.2349114-14295-264742941497318/.source.conf _original_basename=.kaocflcp follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:08 managed-node1 python3.9[53356]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:08 managed-node1 python3.9[53505]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:08 managed-node1 python3.9[53627]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933648.3819814-14329-124138118490068/.source.conf _original_basename=.cw2ofq3l follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:09 managed-node1 python3.9[53776]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:09 managed-node1 python3.9[53925]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:10 managed-node1 python3.9[54076]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:14:10 managed-node1 python3.9[54225]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:10 managed-node1 python3.9[54347]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933650.3431063-14385-203115853451002/.source.json 
_original_basename=.9ge15xwj follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:11 managed-node1 python3.9[54496]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:12 managed-node1 python3.9[54647]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:14 managed-node1 python3.9[55067]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:15 managed-node1 python3.9[55218]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:15 managed-node1 python3.9[55369]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:16 managed-node1 python3.9[55520]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:16 managed-node1 python3.9[55671]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:18 managed-node1 python3.9[55971]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:19 managed-node1 python3.9[56126]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:20 managed-node1 python3.9[56277]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:21 managed-node1 python3.9[56426]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:21 managed-node1 python3.9[56501]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.05q0dgxv recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None 
modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:22 managed-node1 python3.9[56650]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:22 managed-node1 python3.9[56799]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:22 managed-node1 python3.9[56874]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.p8krhe9y recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:23 managed-node1 python3.9[57023]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:23 managed-node1 python3.9[57172]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:24 managed-node1 python3.9[57247]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.41qn4gp5 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:24 managed-node1 python3.9[57396]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:24 managed-node1 python3.9[57545]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:25 managed-node1 python3.9[57696]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:14:26 managed-node1 python3.9[57845]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:27 managed-node1 python3.9[57996]: ansible-file Invoked with path=/root/.config/containers 
state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:28 managed-node1 python3.9[58369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:29 managed-node1 python3.9[58520]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:30 managed-node1 python3.9[58671]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:30 managed-node1 python3.9[58822]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:30 managed-node1 python3.9[58973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:31 managed-node1 python3.9[59124]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf Jul 07 20:14:32 managed-node1 python3.9[59273]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf Jul 07 20:14:32 managed-node1 python3.9[59422]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf Jul 07 20:14:33 managed-node1 python3.9[59571]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 07 20:14:33 managed-node1 python3.9[59720]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf Jul 07 20:14:34 managed-node1 python3.9[59869]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf Jul 07 20:14:35 managed-node1 python3.9[60167]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:37 managed-node1 python3.9[60322]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:38 managed-node1 python3.9[60473]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:38 managed-node1 python3.9[60622]: ansible-ansible.legacy.stat Invoked with 
path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:38 managed-node1 python3.9[60744]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933678.2505736-15338-238322287102956/.source.conf _original_basename=.8f6zzn7v follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:39 managed-node1 python3.9[60893]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:39 managed-node1 python3.9[61042]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:40 managed-node1 python3.9[61117]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.ttqitrgk recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:40 managed-node1 python3.9[61266]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:41 managed-node1 python3.9[61415]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:14:41 managed-node1 python3.9[61490]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.chl3shxe recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:41 managed-node1 python3.9[61639]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:42 
managed-node1 python3.9[61788]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:42 managed-node1 python3.9[61939]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json Jul 07 20:14:43 managed-node1 python3.9[62088]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf Jul 07 20:14:43 managed-node1 python3.9[62237]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:44 managed-node1 python3.9[62386]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:44 managed-node1 python3.9[62535]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:44 managed-node1 python3.9[62684]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:45 managed-node1 python3.9[62833]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:45 managed-node1 python3.9[62982]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:45 managed-node1 python3.9[63131]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None 
_diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:46 managed-node1 python3.9[63280]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:46 managed-node1 python3.9[63429]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:46 managed-node1 python3.9[63578]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:47 managed-node1 python3.9[63727]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_630o0ml1/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:47 managed-node1 python3.9[63877]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_630o0ml1 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:14:49 managed-node1 python3.9[64075]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:14:50 managed-node1 python3.9[64226]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:50 managed-node1 python3.9[64375]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:52 managed-node1 python3.9[64673]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:14:53 managed-node1 python3.9[64828]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:14:53 managed-node1 python3.9[64978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:14:57 managed-node1 python3.9[65178]: 
ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:15:00 managed-node1 python3.9[65353]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:00 managed-node1 python3.9[65502]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:02 managed-node1 python3.9[65800]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:03 managed-node1 python3.9[65955]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:15:04 managed-node1 python3.9[66105]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:09 managed-node1 python3.9[66305]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jul 07 20:15:10 managed-node1 python3.9[66480]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:10 managed-node1 python3.9[66629]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:12 managed-node1 python3.9[66927]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:13 managed-node1 python3.9[67082]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jul 07 20:15:14 managed-node1 python3.9[67232]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:16 managed-node1 python3.9[67383]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:17 managed-node1 python3.9[67534]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:18 managed-node1 python3.9[67683]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:18 managed-node1 python3.9[67803]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933717.8906143-17219-30430442070937/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False 
content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:20 managed-node1 python3.9[68101]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:21 managed-node1 python3.9[68256]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:23 managed-node1 python3.9[68407]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:24 managed-node1 podman[68566]: 2025-07-07 20:15:24.692519338 -0400 EDT m=+0.023060903 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY Jul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:25 managed-node1 python3.9[68721]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:25 managed-node1 python3.9[68870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jul 07 20:15:25 managed-node1 python3.9[68990]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933725.2913904-17409-127642446506455/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jul 07 20:15:28 managed-node1 python3.9[69288]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:29 managed-node1 python3.9[69443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:32 managed-node1 python3.9[69594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True 
checksum_algorithm=sha1 Jul 07 20:15:33 managed-node1 python3.9[69745]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jul 07 20:15:33 managed-node1 python3.9[69895]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jul 07 20:15:34 managed-node1 python3.9[70195]: ansible-ansible.legacy.command Invoked with _raw_params=set -x set -o pipefail exec 1>&2 #podman volume rm --all #podman network prune -f podman volume ls podman network ls podman secret ls podman container ls podman pod ls podman images systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jul 07 20:15:35 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jul 07 20:15:36 managed-node1 python3.9[70540]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jul 07 20:15:37 managed-node1 python3.9[70695]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None Jul 07 20:15:37 managed-node1 python3.9[70845]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None PLAY RECAP ********************************************************************* managed-node1 : ok=110 changed=2 unreachable=0 failed=2 skipped=190 rescued=2 ignored=0 SYSTEM ROLES ERRORS BEGIN v1 [ { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:15:34.372726+00:00Z", "host": "managed-node1", "message": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "start_time": "2025-07-08T00:15:34.353165+00:00Z", "task_name": "Parse quadlet file", "task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12" }, { "ansible_version": "2.17.12", "delta": "0:00:00.179637", "end_time": "2025-07-07 20:15:34.863787", "host": "managed-node1", "message": "non-zero return code", "rc": 1, "start_time": "2025-07-07 20:15:34.684150", "stderr": "+ set -o pipefail\n+ exec\n+ podman volume ls\nDRIVER VOLUME NAME\n+ podman network ls\nNETWORK ID NAME DRIVER\n2f259bab93aa podman bridge\nf12c2088dbae podman-default-kube-network bridge\n+ podman secret ls\nID NAME DRIVER CREATED UPDATED\n+ podman container ls\nCONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES\n+ podman pod ls\nPOD ID NAME STATUS CREATED INFRA ID # OF CONTAINERS\n+ podman images\nREPOSITORY TAG IMAGE ID CREATED SIZE\nquay.io/libpod/registry 2.8.2 0030ba3d620c 23 months ago 24.6 MB\nlocalhost:5000/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\nquay.io/libpod/testimage 20210610 9f9ec7f2fdef 4 years ago 7.99 MB\n+ systemctl list-units\n+ grep quadlet", "task_name": "Debug3", "task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270" }, { "ansible_version": "2.17.12", "end_time": "2025-07-08T00:15:37.533196+00:00Z", "host": "managed-node1", "message": "The given podman user [user_quadlet_basic] does not exist - cannot continue\n", "start_time": "2025-07-08T00:15:37.511135+00:00Z", "task_name": "Fail if user does not exist", "task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9" }, { "ansible_version": "2.17.12", "delta": "0:00:00.051019", "end_time": "2025-07-07 20:15:37.894301", "host": "managed-node1", "message": "", "rc": 0, "start_time": "2025-07-07 20:15:37.843282", "stdout": "Jul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.50636717 -0400 EDT m=+0.060702870 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:41 managed-node1 python3.9[33616]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3725.\nJul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.567765779 -0400 EDT m=+0.122101592 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:41 managed-node1 podman[33622]: 2025-07-07 20:12:41.57173424 -0400 EDT m=+0.126069868 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:41 managed-node1 conmon[33633]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33633]: This container is intended for podman CI testing\nJul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.621077845 -0400 EDT m=+0.032903868 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:41 
managed-node1 podman[33643]: 2025-07-07 20:12:41.635371985 -0400 EDT m=+0.047197781 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3730.\nJul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.684616758 -0400 EDT m=+0.096442569 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:41 managed-node1 podman[33643]: 2025-07-07 20:12:41.688361824 -0400 EDT m=+0.100187675 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33679]: This container is intended for podman CI testing\nJul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.721345697 -0400 EDT m=+0.023159256 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.734216339 -0400 EDT m=+0.036029915 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3735.\nJul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.785871287 -0400 EDT m=+0.087684859 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:41 managed-node1 podman[33683]: 2025-07-07 20:12:41.788852193 -0400 EDT m=+0.090665898 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33694]: This container is intended for podman CI testing\nJul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.824660189 -0400 EDT m=+0.023362688 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.837386611 -0400 EDT m=+0.036089125 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3740.\nJul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.883258808 -0400 EDT m=+0.081961361 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:41 managed-node1 podman[33698]: 2025-07-07 20:12:41.886125769 -0400 EDT m=+0.084828300 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33709]: This container is intended for podman CI testing\nJul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.920368053 -0400 EDT m=+0.023513273 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.933238289 -0400 EDT m=+0.036383481 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:41 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3745.\nJul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.981589962 -0400 EDT m=+0.084735325 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:41 managed-node1 podman[33713]: 2025-07-07 20:12:41.986739296 -0400 EDT m=+0.089884524 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:41 managed-node1 auth_test_1_kube-auth_test_1_kube[33724]: This container is intended for podman CI testing\nJul 07 20:12:41 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.021558667 -0400 EDT m=+0.024961424 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.034303085 -0400 EDT m=+0.037705836 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun 
container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3750.\nJul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.085120285 -0400 EDT m=+0.088523050 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 podman[33728]: 2025-07-07 20:12:42.088183664 -0400 EDT m=+0.091586459 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33739]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 conmon[33739]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.12308754 -0400 EDT m=+0.023361800 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.135582446 -0400 EDT m=+0.035856641 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3755.\nJul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.187408375 -0400 EDT m=+0.087682662 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 podman[33743]: 2025-07-07 20:12:42.191561529 -0400 EDT m=+0.091835831 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33754]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 conmon[33754]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.246794704 -0400 EDT m=+0.035170840 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.263541922 -0400 EDT m=+0.051917974 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3760.\nJul 07 20:12:42 managed-node1 rsyslogd[809]: imjournal: journal files changed, reloading... [v8.2412.0-2.el9 try https://www.rsyslog.com/e/0 ]\nJul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.319658138 -0400 EDT m=+0.108034286 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33828]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 podman[33761]: 2025-07-07 20:12:42.337549515 -0400 EDT m=+0.125925516 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.381151412 -0400 EDT m=+0.028864286 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.397512131 -0400 EDT m=+0.045224977 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3765.\nJul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.458716015 -0400 EDT m=+0.106428982 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33910]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 conmon[33910]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:42 managed-node1 podman[33854]: 2025-07-07 20:12:42.464861994 -0400 EDT m=+0.112574931 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.518544455 -0400 EDT m=+0.037340984 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.533005797 -0400 EDT 
m=+0.051802239 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 python3.9[33908]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3770.\nJul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.597515745 -0400 EDT m=+0.116312289 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 podman[33914]: 2025-07-07 20:12:42.601910611 -0400 EDT m=+0.120706991 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33925]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.649266086 -0400 EDT m=+0.035775114 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.662040499 
-0400 EDT m=+0.048549443 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3775.\nJul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.704656431 -0400 EDT m=+0.091165403 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 podman[33931]: 2025-07-07 20:12:42.708298645 -0400 EDT m=+0.094807598 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33966]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.740044829 -0400 EDT m=+0.022495809 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.752990822 -0400 EDT m=+0.035441768 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3780.\nJul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.800279156 -0400 EDT m=+0.082730097 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33981]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 podman[33970]: 2025-07-07 20:12:42.803693605 -0400 EDT m=+0.086144594 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 conmon[33981]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.838450335 -0400 EDT m=+0.022735162 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.851144871 -0400 EDT 
m=+0.035429657 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:42 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3785.\nJul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.906726377 -0400 EDT m=+0.091011163 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:42 managed-node1 podman[33985]: 2025-07-07 20:12:42.909767637 -0400 EDT m=+0.094052553 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:42 managed-node1 auth_test_1_kube-auth_test_1_kube[33996]: This container is intended for podman CI testing\nJul 07 20:12:42 managed-node1 conmon[33996]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:42 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:42 managed-node1 podman[34000]: 2025-07-07 20:12:42.94603736 -0400 EDT m=+0.022389547 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:42 managed-node1 podman[34000]: 2025-07-07 20:12:42.95838577 -0400 EDT m=+0.034737914 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3790.\nJul 07 20:12:43 managed-node1 podman[34000]: 2025-07-07 20:12:43.010324461 -0400 EDT m=+0.086676634 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34000]: 2025-07-07 20:12:43.013287346 -0400 EDT m=+0.089639510 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34011]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34011]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.048792098 -0400 EDT m=+0.022901709 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.061583347 -0400 EDT m=+0.035692962 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3795.\nJul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.109393741 -0400 EDT m=+0.083503343 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:43 managed-node1 podman[34015]: 2025-07-07 20:12:43.112325979 -0400 EDT m=+0.086435576 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34027]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 conmon[34027]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.147824494 -0400 EDT m=+0.022685823 container died 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.160367485 -0400 EDT m=+0.035228812 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3800.\nJul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.208261872 -0400 EDT m=+0.083123369 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34031]: 2025-07-07 20:12:43.211258219 -0400 EDT m=+0.086119570 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34042]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34042]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.246832569 -0400 EDT m=+0.022325446 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.259383759 -0400 EDT m=+0.034876604 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3805.\nJul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.305492865 -0400 EDT m=+0.080985790 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34046]: 2025-07-07 20:12:43.308387804 -0400 EDT m=+0.083880721 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34057]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope 
has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34057]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.34873328 -0400 EDT m=+0.027342276 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.363474331 -0400 EDT m=+0.042083270 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3810.\nJul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.410621492 -0400 EDT m=+0.089230429 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 podman[34061]: 2025-07-07 20:12:43.413616835 -0400 EDT m=+0.092225843 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34073]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34073]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.448247251 -0400 EDT m=+0.020927733 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.461523867 -0400 EDT m=+0.034204271 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3815.\nJul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.505693928 -0400 EDT m=+0.078374360 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 podman[34077]: 2025-07-07 20:12:43.50945538 -0400 EDT m=+0.082135854 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34089]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: 
libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34089]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.542568623 -0400 EDT m=+0.023392139 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.554898291 -0400 EDT m=+0.035721832 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3820.\nJul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.604504653 -0400 EDT m=+0.085328217 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34093]: 2025-07-07 20:12:43.608980924 -0400 EDT m=+0.089804434 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 
managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34104]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 conmon[34104]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.643575359 -0400 EDT m=+0.021129814 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.656332565 -0400 EDT m=+0.033887028 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3825.\nJul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.69945226 -0400 EDT m=+0.077006794 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34108]: 2025-07-07 20:12:43.702989717 -0400 EDT m=+0.080544207 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, 
created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34119]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.737060945 -0400 EDT m=+0.024227188 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.750183557 -0400 EDT m=+0.037349790 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3830.\nJul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.79819496 -0400 EDT m=+0.085361237 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34123]: 2025-07-07 20:12:43.801180924 -0400 EDT m=+0.088347172 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:43 managed-node1 
auth_test_1_kube-auth_test_1_kube[34134]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 conmon[34134]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.835320547 -0400 EDT m=+0.021567597 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.847704391 -0400 EDT m=+0.033951418 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3835.\nJul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.894082011 -0400 EDT m=+0.080329100 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:43 managed-node1 podman[34138]: 2025-07-07 20:12:43.897088852 -0400 EDT m=+0.083336170 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34149]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 conmon[34149]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.933651016 -0400 EDT m=+0.022365107 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.946066969 -0400 EDT m=+0.034781019 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:43 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3840.\nJul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.994562003 -0400 EDT m=+0.083276126 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:43 managed-node1 podman[34153]: 2025-07-07 20:12:43.997511538 -0400 EDT m=+0.086225656 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:43 managed-node1 auth_test_1_kube-auth_test_1_kube[34165]: This container is intended for podman CI testing\nJul 07 20:12:43 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.031715352 -0400 EDT m=+0.024332851 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.044791422 -0400 EDT m=+0.037408902 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3845.\nJul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.093420708 -0400 EDT m=+0.086038408 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 
20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34180]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 conmon[34180]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34169]: 2025-07-07 20:12:44.099571715 -0400 EDT m=+0.092189277 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.130879463 -0400 EDT m=+0.022875842 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.143583403 -0400 EDT m=+0.035579729 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3850.\nJul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.192949135 -0400 EDT m=+0.084945537 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 podman[34184]: 2025-07-07 20:12:44.196037702 -0400 EDT m=+0.088034089 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34195]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 conmon[34195]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.232452577 -0400 EDT m=+0.023557794 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.245164617 -0400 EDT m=+0.036269711 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3855.\nJul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.289637994 -0400 EDT m=+0.080743233 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: 
Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34211]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 conmon[34211]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34199]: 2025-07-07 20:12:44.296236962 -0400 EDT m=+0.087342134 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.334423585 -0400 EDT m=+0.026223637 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.34696504 -0400 EDT m=+0.038765223 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3860.\nJul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.394138504 -0400 EDT m=+0.085938568 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 podman[34215]: 2025-07-07 20:12:44.398059266 -0400 EDT m=+0.089859394 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34227]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.433541456 -0400 EDT m=+0.023710477 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.446525508 -0400 EDT m=+0.036694502 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3865.\nJul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.499327904 -0400 EDT m=+0.089496946 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 podman[34231]: 2025-07-07 20:12:44.502313318 -0400 EDT m=+0.092482358 container start 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34242]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 conmon[34242]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.538737175 -0400 EDT m=+0.023233861 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.551520023 -0400 EDT m=+0.036016714 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3870.\nJul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.603486031 -0400 EDT m=+0.087982819 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 podman[34246]: 2025-07-07 20:12:44.606474049 -0400 EDT m=+0.090970895 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34257]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.642852207 -0400 EDT m=+0.021419460 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.655259328 -0400 EDT m=+0.033826523 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3875.\nJul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.704108726 -0400 EDT m=+0.082675932 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 podman[34261]: 2025-07-07 20:12:44.707014409 -0400 EDT m=+0.085581642 container start 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34273]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 conmon[34273]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.741456944 -0400 EDT m=+0.020475966 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.754038707 -0400 EDT m=+0.033057743 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3880.\nJul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.798618058 -0400 EDT m=+0.077637084 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 podman[34277]: 2025-07-07 20:12:44.801551057 -0400 EDT m=+0.080570258 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34288]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.835424947 -0400 EDT m=+0.021183939 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.848104715 -0400 EDT m=+0.033863640 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3885.\nJul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.89843642 -0400 EDT m=+0.084195352 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 podman[34292]: 2025-07-07 20:12:44.901416892 
-0400 EDT m=+0.087175919 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34303]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 conmon[34303]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.93488204 -0400 EDT m=+0.021084894 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.948114757 -0400 EDT m=+0.034317523 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:44 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3890.\nJul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.99428785 -0400 EDT m=+0.080490660 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:44 managed-node1 auth_test_1_kube-auth_test_1_kube[34318]: This container is intended for podman CI testing\nJul 07 20:12:44 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:44 managed-node1 podman[34307]: 2025-07-07 20:12:44.997964153 -0400 EDT m=+0.084166944 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.030961646 -0400 EDT m=+0.021565433 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.047502169 -0400 EDT m=+0.038105908 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3895.\nJul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.103724388 -0400 EDT m=+0.094328080 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 podman[34322]: 2025-07-07 20:12:45.106984259 
-0400 EDT m=+0.097587984 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34364]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 conmon[34364]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.157943882 -0400 EDT m=+0.030560165 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.170688107 -0400 EDT m=+0.043304256 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3900.\nJul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.234124933 -0400 EDT m=+0.106741355 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 podman[34389]: 2025-07-07 20:12:45.238490952 -0400 EDT m=+0.111107125 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34447]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 conmon[34447]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.288662798 -0400 EDT m=+0.034016222 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.301874445 -0400 EDT m=+0.047228008 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3905.\nJul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.381555979 -0400 EDT m=+0.126909625 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34488]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 podman[34477]: 2025-07-07 20:12:45.38672817 -0400 EDT m=+0.132081609 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 python3.9[34476]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.438723941 -0400 EDT m=+0.040591696 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.453284139 -0400 EDT m=+0.055151863 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3910.\nJul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.505040801 -0400 EDT m=+0.106908624 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:45 managed-node1 podman[34492]: 2025-07-07 20:12:45.508106515 -0400 EDT m=+0.109974334 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34530]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 conmon[34530]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.551903395 -0400 EDT m=+0.029054384 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.565026326 -0400 EDT m=+0.042177295 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3915.\nJul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.612904168 -0400 
EDT m=+0.090055136 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34546]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 podman[34534]: 2025-07-07 20:12:45.616986931 -0400 EDT m=+0.094138023 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.65020363 -0400 EDT m=+0.022500482 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.662323598 -0400 EDT m=+0.034620330 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3920.\nJul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.707102849 -0400 EDT m=+0.079399643 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 podman[34550]: 2025-07-07 20:12:45.710030304 -0400 EDT m=+0.082327099 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34561]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.741502106 -0400 EDT m=+0.022128096 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.754021345 -0400 EDT m=+0.034647277 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3925.\nJul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.802124996 -0400 EDT m=+0.082750990 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:45 managed-node1 podman[34565]: 2025-07-07 20:12:45.805080684 -0400 EDT m=+0.085706677 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34576]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 conmon[34576]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.839082255 -0400 EDT m=+0.020790370 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.851852538 -0400 EDT m=+0.033560581 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3930.\nJul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.898524256 -0400 EDT m=+0.080232378 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 podman[34580]: 2025-07-07 20:12:45.901532521 -0400 EDT m=+0.083240667 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:45 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:45 managed-node1 auth_test_1_kube-auth_test_1_kube[34591]: This container is intended for podman CI testing\nJul 07 20:12:45 managed-node1 conmon[34591]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:45 managed-node1 podman[34595]: 2025-07-07 20:12:45.939231145 -0400 EDT m=+0.023588832 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:45 managed-node1 podman[34595]: 2025-07-07 20:12:45.952028898 -0400 EDT m=+0.036386479 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:45 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3935.\nJul 07 20:12:46 managed-node1 podman[34595]: 2025-07-07 20:12:46.000385348 -0400 
EDT m=+0.084742969 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:46 managed-node1 podman[34595]: 2025-07-07 20:12:46.003323704 -0400 EDT m=+0.087681325 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34606]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.038256205 -0400 EDT m=+0.023133829 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.050563115 -0400 EDT m=+0.035440605 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3940.\nJul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.097984316 -0400 EDT m=+0.082862186 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 podman[34610]: 2025-07-07 20:12:46.101388337 -0400 EDT m=+0.086265909 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34621]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 conmon[34621]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.13377981 -0400 EDT m=+0.022146824 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.146317225 -0400 EDT m=+0.034684206 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3945.\nJul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.188421525 -0400 EDT 
m=+0.076788579 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 podman[34625]: 2025-07-07 20:12:46.191434273 -0400 EDT m=+0.079801340 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34636]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 conmon[34636]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.223847962 -0400 EDT m=+0.022003559 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.236340875 -0400 EDT m=+0.034496479 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3950.\nJul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.284186503 -0400 EDT m=+0.082342176 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34640]: 2025-07-07 20:12:46.28721142 -0400 EDT m=+0.085367074 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34651]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 conmon[34651]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.324290164 -0400 EDT m=+0.022430941 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.337132899 -0400 EDT m=+0.035273639 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3955.\nJul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.381677742 -0400 EDT m=+0.079818544 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 podman[34655]: 2025-07-07 20:12:46.384681206 -0400 EDT m=+0.082821978 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34666]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.423094224 -0400 EDT m=+0.029116182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.436067954 -0400 EDT m=+0.042089938 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3960.\nJul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.483206874 -0400 EDT m=+0.089228758 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 podman[34672]: 2025-07-07 20:12:46.48613867 -0400 EDT m=+0.092160608 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34683]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 conmon[34683]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.520804338 -0400 EDT m=+0.021717877 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.533304539 -0400 EDT m=+0.034218030 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3965.\nJul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.585096331 -0400 EDT m=+0.086009885 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 podman[34687]: 2025-07-07 20:12:46.588088229 -0400 EDT m=+0.089001762 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34698]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.624273128 -0400 EDT m=+0.022844259 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.636433128 -0400 EDT m=+0.035004218 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished 
successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3970.\nJul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.688355591 -0400 EDT m=+0.086926731 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34713]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 podman[34702]: 2025-07-07 20:12:46.691731915 -0400 EDT m=+0.090303278 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 conmon[34713]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.726479493 -0400 EDT m=+0.022453022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.738600586 -0400 EDT m=+0.034574136 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3975.\nJul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.789135405 -0400 EDT m=+0.085109004 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 podman[34717]: 2025-07-07 20:12:46.793455608 -0400 EDT m=+0.089429341 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34728]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 conmon[34728]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.828415822 -0400 EDT m=+0.022161399 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.840867235 -0400 EDT m=+0.034612783 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:46 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit 
libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3980.\nJul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.897505231 -0400 EDT m=+0.091250903 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:46 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:46 managed-node1 auth_test_1_kube-auth_test_1_kube[34761]: This container is intended for podman CI testing\nJul 07 20:12:46 managed-node1 conmon[34761]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:46 managed-node1 podman[34732]: 2025-07-07 20:12:46.90395772 -0400 EDT m=+0.097703287 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:46 managed-node1 podman[34776]: 2025-07-07 20:12:46.948881564 -0400 EDT m=+0.030238604 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:46 managed-node1 podman[34776]: 2025-07-07 20:12:46.962711084 -0400 EDT m=+0.044068090 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3985.\nJul 07 20:12:47 managed-node1 podman[34776]: 2025-07-07 20:12:47.024077481 -0400 EDT m=+0.105434639 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34834]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 conmon[34834]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[34776]: 2025-07-07 20:12:47.043812635 -0400 EDT m=+0.125169644 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.0939691 -0400 EDT m=+0.037738572 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.108160326 -0400 EDT m=+0.051929759 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 python3.9[34893]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3990.\nJul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.242082057 -0400 EDT m=+0.185851724 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34900]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 podman[34862]: 2025-07-07 20:12:47.247567844 -0400 EDT m=+0.191337205 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.298810756 -0400 EDT m=+0.036371284 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.311789612 -0400 EDT m=+0.049350163 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, 
name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 3995.\nJul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.358183364 -0400 EDT m=+0.095743929 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:47 managed-node1 podman[34906]: 2025-07-07 20:12:47.361134485 -0400 EDT m=+0.098695044 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34941]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.395146285 -0400 EDT m=+0.022822779 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.407443176 -0400 EDT m=+0.035119577 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4000.\nJul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.453148133 -0400 EDT m=+0.080824579 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:47 managed-node1 podman[34945]: 2025-07-07 20:12:47.456172469 -0400 EDT m=+0.083848916 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34956]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 conmon[34956]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.496013156 -0400 EDT m=+0.030390600 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.508423642 -0400 EDT m=+0.042801057 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4005.\nJul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.560213565 -0400 EDT m=+0.094591028 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 podman[34960]: 2025-07-07 20:12:47.563206119 -0400 EDT m=+0.097583597 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34971]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 conmon[34971]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.596412334 -0400 EDT m=+0.022899092 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 
podman[34976]: 2025-07-07 20:12:47.60891684 -0400 EDT m=+0.035403593 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4010.\nJul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.657587558 -0400 EDT m=+0.084074365 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 podman[34976]: 2025-07-07 20:12:47.660524503 -0400 EDT m=+0.087011410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[34987]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 conmon[34987]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.695593786 -0400 EDT m=+0.022541319 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.708183773 -0400 EDT m=+0.035131258 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4015.\nJul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.750396628 -0400 EDT m=+0.077344189 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:47 managed-node1 podman[34991]: 2025-07-07 20:12:47.7538329 -0400 EDT m=+0.080780443 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35002]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.789148197 -0400 EDT m=+0.022104143 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.801861569 -0400 EDT m=+0.034817418 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4020.\nJul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.849787507 -0400 EDT m=+0.082743391 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:47 managed-node1 podman[35006]: 2025-07-07 20:12:47.852788948 -0400 EDT m=+0.085745045 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35017]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 conmon[35017]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.886433326 -0400 EDT m=+0.023333664 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.898635068 -0400 EDT m=+0.035535395 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4025.\nJul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.94781473 -0400 EDT m=+0.084715070 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 podman[35021]: 2025-07-07 20:12:47.950786701 -0400 EDT m=+0.087687110 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:47 managed-node1 auth_test_1_kube-auth_test_1_kube[35032]: This container is intended for podman CI testing\nJul 07 20:12:47 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:47 managed-node1 conmon[35032]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:47 managed-node1 podman[35036]: 2025-07-07 20:12:47.985090075 -0400 EDT m=+0.020824461 container died 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:47 managed-node1 podman[35036]: 2025-07-07 20:12:47.99765414 -0400 EDT m=+0.033388507 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4030.\nJul 07 20:12:48 managed-node1 podman[35036]: 2025-07-07 20:12:48.045157855 -0400 EDT m=+0.080892244 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 podman[35036]: 2025-07-07 20:12:48.048132676 -0400 EDT m=+0.083867091 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35048]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 conmon[35048]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: 
/sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.082018163 -0400 EDT m=+0.023940931 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.094295104 -0400 EDT m=+0.036217821 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4035.\nJul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.137986786 -0400 EDT m=+0.079909620 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 podman[35052]: 2025-07-07 20:12:48.14089375 -0400 EDT m=+0.082816485 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35063]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has 
successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 conmon[35063]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.174678497 -0400 EDT m=+0.023706278 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.18747958 -0400 EDT m=+0.036507308 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4040.\nJul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.233346802 -0400 EDT m=+0.082374550 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35079]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 podman[35067]: 2025-07-07 20:12:48.236827618 -0400 EDT m=+0.085855456 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: 
https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 conmon[35079]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.274803973 -0400 EDT m=+0.023869804 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.287274677 -0400 EDT m=+0.036340490 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4045.\nJul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.333271188 -0400 EDT m=+0.082337067 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 podman[35083]: 2025-07-07 20:12:48.337315563 -0400 EDT m=+0.086381467 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35094]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: 
libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 conmon[35094]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.367811267 -0400 EDT m=+0.020607548 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.380577814 -0400 EDT m=+0.033374070 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4050.\nJul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.425671708 -0400 EDT m=+0.078467975 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:48 managed-node1 podman[35098]: 2025-07-07 20:12:48.429157068 -0400 EDT m=+0.081953417 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 
managed-node1 auth_test_1_kube-auth_test_1_kube[35110]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 conmon[35110]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.462682903 -0400 EDT m=+0.023415424 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.475209185 -0400 EDT m=+0.035941652 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4055.\nJul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.528134339 -0400 EDT m=+0.088866851 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35125]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit 
libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 podman[35114]: 2025-07-07 20:12:48.532354356 -0400 EDT m=+0.093086760 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.564658555 -0400 EDT m=+0.021999487 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.578013664 -0400 EDT m=+0.035354664 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4060.\nJul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.635246136 -0400 EDT m=+0.092587077 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:48 managed-node1 podman[35129]: 2025-07-07 20:12:48.638726936 -0400 EDT m=+0.096067987 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:48 
managed-node1 auth_test_1_kube-auth_test_1_kube[35140]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 conmon[35140]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.674083012 -0400 EDT m=+0.022192448 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.68682521 -0400 EDT m=+0.034934549 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4065.\nJul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.728260743 -0400 EDT m=+0.076370138 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 podman[35144]: 2025-07-07 20:12:48.731194428 -0400 EDT m=+0.079304101 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35155]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 conmon[35155]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.762366829 -0400 EDT m=+0.020915245 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.775477215 -0400 EDT m=+0.034025616 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4070.\nJul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.820672045 -0400 EDT m=+0.079220476 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 
The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35170]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 conmon[35170]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:48 managed-node1 podman[35159]: 2025-07-07 20:12:48.826968782 -0400 EDT m=+0.085517199 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.859644209 -0400 EDT m=+0.023867878 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.873025178 -0400 EDT m=+0.037249131 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4075.\nJul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.930458545 -0400 EDT m=+0.094682286 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:48 managed-node1 podman[35174]: 2025-07-07 20:12:48.933368693 -0400 EDT m=+0.097592453 container start 
b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:48 managed-node1 auth_test_1_kube-auth_test_1_kube[35197]: This container is intended for podman CI testing\nJul 07 20:12:48 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:48 managed-node1 podman[35213]: 2025-07-07 20:12:48.980696341 -0400 EDT m=+0.028846487 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:48 managed-node1 podman[35213]: 2025-07-07 20:12:48.994689581 -0400 EDT m=+0.042839889 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4080.\nJul 07 20:12:49 managed-node1 podman[35213]: 2025-07-07 20:12:49.051661925 -0400 EDT m=+0.099812226 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 
\n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35275]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 conmon[35275]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:49 managed-node1 podman[35213]: 2025-07-07 20:12:49.057246727 -0400 EDT m=+0.105397028 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.100010948 -0400 EDT m=+0.028683846 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.11603344 -0400 EDT m=+0.044706047 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4085.\nJul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.17786511 -0400 EDT m=+0.106537682 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated 
successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35340]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 conmon[35340]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:49 managed-node1 podman[35296]: 2025-07-07 20:12:49.184863676 -0400 EDT m=+0.113536155 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.245543717 -0400 EDT m=+0.045029030 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.259415611 -0400 EDT m=+0.058900865 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 python3.9[35335]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4090.\nJul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.320731505 -0400 EDT m=+0.120216683 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 podman[35344]: 2025-07-07 20:12:49.326415099 -0400 EDT m=+0.125900275 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35357]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 conmon[35357]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.372946551 -0400 EDT m=+0.028761302 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.385464131 -0400 EDT m=+0.041278816 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4095.\nJul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.42850947 -0400 EDT m=+0.084324163 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:49 managed-node1 podman[35369]: 2025-07-07 20:12:49.432127873 -0400 EDT m=+0.087942624 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35396]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 conmon[35396]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.468834038 -0400 EDT m=+0.022128269 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.481687819 -0400 EDT m=+0.034982048 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4100.\nJul 07 20:12:49 managed-node1 
podman[35400]: 2025-07-07 20:12:49.528341097 -0400 EDT m=+0.081635334 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 podman[35400]: 2025-07-07 20:12:49.532328195 -0400 EDT m=+0.085622512 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35411]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.564408437 -0400 EDT m=+0.022548782 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.577082086 -0400 EDT m=+0.035222377 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4105.\nJul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.626428781 -0400 EDT m=+0.084569062 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 
(image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:49 managed-node1 podman[35415]: 2025-07-07 20:12:49.629507264 -0400 EDT m=+0.087647538 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35427]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.663957045 -0400 EDT m=+0.021447599 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.676778631 -0400 EDT m=+0.034269117 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4110.\nJul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.724417963 -0400 EDT m=+0.081908513 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, 
created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35443]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 podman[35431]: 2025-07-07 20:12:49.728334134 -0400 EDT m=+0.085824587 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.763470561 -0400 EDT m=+0.022453377 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.776036134 -0400 EDT m=+0.035018902 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4115.\nJul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.826175596 -0400 EDT m=+0.085158416 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 podman[35447]: 2025-07-07 20:12:49.829136491 -0400 EDT m=+0.088119344 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35458]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 conmon[35458]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.860688957 -0400 EDT m=+0.022137746 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.873165971 -0400 EDT m=+0.034614682 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:49 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4120.\nJul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.916149214 -0400 EDT m=+0.077597920 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 podman[35462]: 2025-07-07 20:12:49.919118383 -0400 EDT m=+0.080567116 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:49 managed-node1 auth_test_1_kube-auth_test_1_kube[35473]: This container is intended for podman CI testing\nJul 07 20:12:49 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:49 managed-node1 podman[35477]: 2025-07-07 20:12:49.955542698 -0400 EDT m=+0.022514779 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:49 managed-node1 podman[35477]: 2025-07-07 20:12:49.968269198 -0400 EDT m=+0.035241281 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4125.\nJul 07 20:12:50 managed-node1 podman[35477]: 2025-07-07 20:12:50.014342683 -0400 EDT m=+0.081314820 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 podman[35477]: 2025-07-07 20:12:50.01878655 -0400 EDT m=+0.085758758 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35488]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.053363645 -0400 EDT m=+0.024574745 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.066171766 -0400 EDT m=+0.037383115 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4130.\nJul 07 20:12:50 managed-node1 podman[35492]: 2025-07-07 20:12:50.115050823 -0400 EDT m=+0.086262000 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 
podman[35492]: 2025-07-07 20:12:50.118910096 -0400 EDT m=+0.090121505 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35503]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 conmon[35503]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.153822119 -0400 EDT m=+0.021646500 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.166416474 -0400 EDT m=+0.034240783 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4135.\nJul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.214244614 -0400 EDT m=+0.082068966 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 podman[35507]: 2025-07-07 20:12:50.217195367 -0400 EDT m=+0.085019732 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35519]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.248680627 -0400 EDT m=+0.020233109 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.261496823 -0400 EDT m=+0.033049364 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4140.\nJul 07 20:12:50 managed-node1 podman[35523]: 2025-07-07 20:12:50.309309961 -0400 EDT m=+0.080862551 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 
podman[35523]: 2025-07-07 20:12:50.312592255 -0400 EDT m=+0.084144814 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35534]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.361351804 -0400 EDT m=+0.036190231 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.374459829 -0400 EDT m=+0.049298407 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4145.\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.422744257 -0400 EDT m=+0.097582608 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35595]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 podman[35538]: 2025-07-07 20:12:50.42588667 -0400 EDT 
m=+0.100725056 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 conmon[35595]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.477186688 -0400 EDT m=+0.035333022 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.491409753 -0400 EDT m=+0.049556363 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4150.\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.552263881 -0400 EDT m=+0.110410323 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 systemd[1]: 
libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35663]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 conmon[35663]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35606]: 2025-07-07 20:12:50.557866714 -0400 EDT m=+0.116012971 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.620063202 -0400 EDT m=+0.048519999 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.633249443 -0400 EDT m=+0.061706156 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4155.\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.692351489 -0400 EDT m=+0.120808216 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35704]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35686]: 2025-07-07 20:12:50.69827612 -0400 EDT m=+0.126732782 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 python3.9[35698]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.759561182 -0400 EDT m=+0.046849024 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.77735646 -0400 EDT m=+0.064644058 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4160.\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.833194605 -0400 EDT m=+0.120482053 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 podman[35708]: 2025-07-07 20:12:50.836115952 -0400 EDT m=+0.123403410 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35741]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.869261632 -0400 EDT m=+0.023764777 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.881551249 -0400 EDT m=+0.036054336 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4165.\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.928402597 -0400 EDT m=+0.082905709 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, 
io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:50 managed-node1 podman[35748]: 2025-07-07 20:12:50.931314876 -0400 EDT m=+0.085818011 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:50 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:50 managed-node1 auth_test_1_kube-auth_test_1_kube[35759]: This container is intended for podman CI testing\nJul 07 20:12:50 managed-node1 conmon[35759]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.96332082 -0400 EDT m=+0.021467626 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:50 managed-node1 podman[35763]: 2025-07-07 20:12:50.976815265 -0400 EDT m=+0.034961991 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4170.\nJul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.044375911 -0400 EDT m=+0.102522824 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, 
pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35798]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35763]: 2025-07-07 20:12:51.049348235 -0400 EDT m=+0.107495274 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.092437769 -0400 EDT m=+0.029222529 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.107275694 -0400 EDT m=+0.044060322 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z)\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4175.\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.15929211 -0400 EDT m=+0.096076800 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, 
io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 podman[35825]: 2025-07-07 20:12:51.163527993 -0400 EDT m=+0.100312554 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35867]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 conmon[35867]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.225315735 -0400 EDT m=+0.048836182 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.239386226 -0400 EDT m=+0.062906669 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 python3.9[35925]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4180.\nJul 07 
20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.466820809 -0400 EDT m=+0.290341396 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 podman[35894]: 2025-07-07 20:12:51.470785368 -0400 EDT m=+0.294305786 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35931]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 conmon[35931]: conmon b645ec7d1200dbc33316 : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice/libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope/container/memory.events\nJul 07 20:12:51 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.552775836 -0400 EDT m=+0.056358134 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0)\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.570347566 -0400 EDT m=+0.073929520 container restart b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test)\nJul 07 20:12:51 managed-node1 systemd-rc-local-generator[35966]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:51 managed-node1 systemd[1]: Stopping A template for running K8s workloads via podman-kube-play...\n\u2591\u2591 Subject: A stop job for unit 
podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4185.\nJul 07 20:12:51 managed-node1 systemd[1]: Started libcrun container.\n\u2591\u2591 Subject: A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4186.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.780283168 -0400 EDT m=+0.283865102 container init b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 auth_test_1_kube-auth_test_1_kube[35985]: This container is intended for podman CI testing\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35937]: 2025-07-07 20:12:51.794156305 -0400 EDT m=+0.297738289 container start b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.805487564 -0400 EDT m=+0.042136278 pod stop 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.807535435 -0400 EDT m=+0.044184379 container died b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry)\nJul 07 20:12:51 managed-node1 systemd[1]: var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount: 
Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-6e8d6012bb184b7d9b4ca98710c13c4398e87e9e6ee6aa4809430868bbb52621-merged.mount has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.856254498 -0400 EDT m=+0.092903180 container cleanup b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, io.buildah.version=1.21.0, io.containers.autoupdate=registry, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage)\nJul 07 20:12:51 managed-node1 systemd[1]: libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 podman[35983]: 2025-07-07 20:12:51.866791467 -0400 EDT m=+0.103440452 container died feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:51 managed-node1 systemd[1]: run-r8208ec8fcd4d4202907a4db922529a4b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-r8208ec8fcd4d4202907a4db922529a4b.scope has successfully entered the 'dead' state.\nJul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left allmulticast mode\nJul 07 20:12:51 managed-node1 kernel: veth1 (unregistering): left promiscuous mode\nJul 07 20:12:51 managed-node1 kernel: podman1: port 1(veth1) entered disabled state\nJul 07 20:12:51 managed-node1 NetworkManager[642]: [1751933571.9144] device (podman1): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:12:51 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4192.\nJul 07 20:12:51 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4192.\nJul 07 20:12:52 managed-node1 systemd[1]: 
run-netns-netns\\x2d2411dc39\\x2d3430\\x2d1e50\\x2dd025\\x2d1a201717aa6d.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2d2411dc39\\x2d3430\\x2d1e50\\x2dd025\\x2d1a201717aa6d.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.071289258 -0400 EDT m=+0.307937965 container cleanup feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: Removed slice cgroup machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice.\n\u2591\u2591 Subject: A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4258 and the job result is done.\nJul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Consumed 1.746s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice completed and consumed the indicated resources.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.098281515 -0400 EDT m=+0.334930231 container remove b645ec7d1200dbc33316ffb79984a4f6db6baffd0de443a5b780d7d563bb8cb5 (image=localhost:5000/libpod/testimage:20210610, name=auth_test_1_kube-auth_test_1_kube, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, app=test, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.124590961 -0400 EDT m=+0.361239677 container remove feaa7003086c0df8ebbdd8637f7901ac17362c9a97d2d0ed4c6e5ddc62f2d631 (image=, name=2f0bc5d17106-infra, pod_id=2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: Failed to open /run/systemd/transient/machine-libpod_pod_2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7.slice: No such file or directory\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.134201698 -0400 EDT m=+0.370850385 pod remove 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7 (image=, name=auth_test_1_kube)\nJul 07 20:12:52 managed-node1 podman[35983]: Pods stopped:\nJul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7\nJul 07 20:12:52 managed-node1 podman[35983]: Pods removed:\nJul 07 20:12:52 managed-node1 podman[35983]: 2f0bc5d17106ec17f38325ab4db59ba549d1dd2f5f242e112da8ca8ab04726d7\nJul 07 20:12:52 managed-node1 podman[35983]: Secrets removed:\nJul 07 20:12:52 managed-node1 podman[35983]: Volumes removed:\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.138635694 -0400 EDT m=+0.375284640 container kill 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 conmon[20791]: conmon 782aa8ef72d65d91e61b : Failed to open cgroups file: /sys/fs/cgroup/machine.slice/libpod-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc.scope/container/memory.events\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.145356852 -0400 EDT m=+0.382005666 container died 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-rootfs-merge.mount has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 podman[35983]: 2025-07-07 20:12:52.214468795 -0400 EDT m=+0.451117504 container remove 782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc (image=, name=3ef6fcac6278-service, 
PODMAN_SYSTEMD_UNIT=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service)\nJul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has successfully entered the 'dead' state.\nJul 07 20:12:52 managed-node1 systemd[1]: Stopped A template for running K8s workloads via podman-kube-play.\n\u2591\u2591 Subject: A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A stop job for unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service has finished.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4185 and the job result is done.\nJul 07 20:12:52 managed-node1 systemd[1]: podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service: Consumed 36.014s CPU time.\n\u2591\u2591 Subject: Resources consumed by unit runtime\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service completed and consumed the indicated resources.\nJul 07 20:12:52 managed-node1 python3.9[36210]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:52 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-782aa8ef72d65d91e61ba9fb886296b4920cb9e99083396c86b646296e8bcbbc-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None\nJul 07 20:12:53 managed-node1 python3.9[36361]: ansible-containers.podman.podman_play version: 5.5.1, kube file /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml\nJul 07 20:12:53 managed-node1 python3.9[36523]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:12:54 managed-node1 python3.9[36672]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:56 managed-node1 python3.9[36823]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:12:56 managed-node1 systemd[1]: Reloading.\nJul 07 20:12:56 managed-node1 systemd-rc-local-generator[36842]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:12:57 managed-node1 python3.9[37008]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:12:57 managed-node1 python3.9[37308]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:12:59 managed-node1 python3.9[37607]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:00 managed-node1 python3.9[37762]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:01 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:13:03 managed-node1 python3.9[37913]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:05 managed-node1 python3.9[38064]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:06 managed-node1 python3.9[38215]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:07 managed-node1 python3.9[38366]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:08 managed-node1 python3.9[38516]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\\x2dkubernetes.d-auth_test_1_kube.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None\nJul 07 20:13:08 managed-node1 python3.9[38667]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:09 managed-node1 python3.9[38816]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/auth_test_1_kube.yml state=absent 
recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:10 managed-node1 python3.9[38965]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:12 managed-node1 python3.9[39116]: ansible-systemd Invoked with name=auth_test_1_quadlet.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:13:12 managed-node1 systemd[1]: Reloading.\nJul 07 20:13:12 managed-node1 systemd-rc-local-generator[39137]: /etc/rc.d/rc.local is not marked executable, skipping.\nJul 07 20:13:12 managed-node1 python3.9[39301]: ansible-stat Invoked with path=/etc/containers/systemd/auth_test_1_quadlet.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:13 managed-node1 python3.9[39601]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:15 managed-node1 python3.9[39900]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:16 managed-node1 python3.9[40054]: ansible-getent Invoked with database=passwd key=auth_test_user1 fail_key=False service=None split=None\nJul 07 20:13:17 managed-node1 python3.9[40204]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:17 managed-node1 python3.9[40354]: ansible-user Invoked with name=auth_test_user1 state=absent non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:18 managed-node1 python3.9[40504]: ansible-file Invoked with path=/home/auth_test_user1 state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:18 managed-node1 python3.9[40653]: ansible-ansible.legacy.command Invoked with _raw_params=podman inspect podman_registry --format '{{range .}}{{range .Mounts}}{{if eq .Type \"volume\"}}{{.Name}}{{end}}{{end}}{{end}}' _uses_shell=False 
expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 python3.9[40810]: ansible-ansible.legacy.command Invoked with _raw_params=podman rm -f podman_registry _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 systemd[1]: libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.265873387 -0400 EDT m=+0.045935077 container died 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left allmulticast mode\nJul 07 20:13:19 managed-node1 kernel: veth0 (unregistering): left promiscuous mode\nJul 07 20:13:19 managed-node1 kernel: podman0: port 1(veth0) entered disabled state\nJul 07 20:13:19 managed-node1 NetworkManager[642]: [1751933599.3183] device (podman0): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed')\nJul 07 20:13:19 managed-node1 systemd[1]: Starting Network Manager Script Dispatcher Service...\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has begun execution\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has begun execution.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4260.\nJul 07 20:13:19 managed-node1 systemd[1]: Started Network Manager Script Dispatcher Service.\n\u2591\u2591 Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 A start job for unit NetworkManager-dispatcher.service has finished successfully.\n\u2591\u2591 \n\u2591\u2591 The job identifier is 4260.\nJul 07 20:13:19 managed-node1 systemd[1]: run-netns-netns\\x2da357660f\\x2d51e9\\x2def21\\x2deb3c\\x2d7f281ab8e18b.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit run-netns-netns\\x2da357660f\\x2d51e9\\x2def21\\x2deb3c\\x2d7f281ab8e18b.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay\\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay\\x2dcontainers-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b-userdata-shm.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 
systemd[1]: var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay-112b09f11ff8fbba8e451f27769fa8e700d9bb89038833f136b238148dff37fa-merged.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 podman[40811]: 2025-07-07 20:13:19.49814603 -0400 EDT m=+0.278207625 container remove 2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b (image=quay.io/libpod/registry:2.8.2, name=podman_registry)\nJul 07 20:13:19 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 systemd[1]: libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit libpod-conmon-2810f669c954eb86ecb9af283de8fd3bb1bc3fec2c4cbb4d9ebaf2dd7f4fe37b.scope has successfully entered the 'dead' state.\nJul 07 20:13:19 managed-node1 python3.9[41023]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume rm 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:19 managed-node1 podman[41024]: 2025-07-07 20:13:19.910047174 -0400 EDT m=+0.025683882 volume remove 1e074241011384a8157c17bad24c616d5279de9c3f24494baf6b8341065b25b1\nJul 07 20:13:20 managed-node1 python3.9[41180]: ansible-file Invoked with path=/tmp/lsr_g7zmh6pf_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:22 managed-node1 python3.9[41378]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:13:23 managed-node1 python3.9[41553]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:23 managed-node1 python3.9[41702]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:25 managed-node1 python3.9[42000]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:26 managed-node1 python3.9[42155]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:13:26 managed-node1 python3.9[42305]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:28 managed-node1 python3.9[42456]: ansible-tempfile Invoked with state=directory prefix=lsr_podman_config_ suffix= path=None\nJul 07 20:13:28 managed-node1 python3.9[42605]: ansible-ansible.legacy.command Invoked with _raw_params=tar --ignore-failed-read -c -P -v -p -f /tmp/lsr_podman_config_630o0ml1/backup.tar /etc/containers/containers.conf.d/50-systemroles.conf /etc/containers/registries.conf.d/50-systemroles.conf /etc/containers/storage.conf /etc/containers/policy.json _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:29 managed-node1 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state.\nJul 07 20:13:29 managed-node1 python3.9[42755]: ansible-user Invoked with name=user1 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node1 update_password=always uid=None group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None\nJul 07 20:13:29 managed-node1 useradd[42757]: new group: name=user1, GID=1000\nJul 07 20:13:29 managed-node1 useradd[42757]: new user: name=user1, UID=1000, GID=1000, home=/home/user1, shell=/bin/bash, from=/dev/pts/0\nJul 07 20:13:31 managed-node1 python3.9[43061]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:32 managed-node1 python3.9[43217]: ansible-getent Invoked with database=passwd key=user1 fail_key=False service=None split=None\nJul 07 20:13:33 managed-node1 python3.9[43367]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:33 managed-node1 python3.9[43518]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:33 managed-node1 python3.9[43668]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:34 managed-node1 python3.9[43818]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False 
_original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:35 managed-node1 python3.9[43967]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:35 managed-node1 python3.9[44087]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933614.9410055-13310-24639621612646/.source.conf _original_basename=.lefyhyea follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:36 managed-node1 python3.9[44236]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:36 managed-node1 python3.9[44385]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:36 managed-node1 python3.9[44505]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933616.3031077-13365-95635820072900/.source.conf _original_basename=.79pds_qw follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:37 managed-node1 python3.9[44654]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:37 managed-node1 python3.9[44803]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:38 managed-node1 python3.9[44923]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/storage.conf owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933617.4859679-13409-167338433742499/.source.conf _original_basename=.5__9m3z1 follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None 
setype=None attributes=None\nJul 07 20:13:38 managed-node1 python3.9[45072]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:38 managed-node1 python3.9[45221]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:39 managed-node1 python3.9[45370]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:39 managed-node1 python3.9[45490]: ansible-ansible.legacy.copy Invoked with dest=/home/user1/.config/containers/policy.json owner=user1 group=user1 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933619.023253-13468-75507327963883/.source.json _original_basename=.gsbubo1d follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:40 managed-node1 python3.9[45639]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:40 managed-node1 python3.9[45790]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:41 managed-node1 python3.9[45940]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:41 managed-node1 python3.9[46090]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:43 managed-node1 python3.9[46508]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:43 managed-node1 python3.9[46659]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:44 managed-node1 python3.9[46809]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:45 managed-node1 python3.9[46959]: ansible-stat Invoked with 
path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:45 managed-node1 python3.9[47110]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:45 managed-node1 python3.9[47261]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:46 managed-node1 python3.9[47412]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:46 managed-node1 python3.9[47563]: ansible-ansible.legacy.command Invoked with _raw_params=grep 'container_name_as_hostname[ ]*=[ ]*true' /home/user1/.config/containers/containers.conf.d/50-systemroles.conf _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:48 managed-node1 python3.9[47862]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:49 managed-node1 python3.9[48017]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:49 managed-node1 python3.9[48168]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:49 managed-node1 python3.9[48318]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:50 managed-node1 python3.9[48468]: ansible-file Invoked with path=/home/user1/.config/containers/containers.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:51 managed-node1 python3.9[48617]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:51 managed-node1 python3.9[48692]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf _original_basename=.yuv1oz8a recurse=False state=file path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None 
attributes=None\nJul 07 20:13:52 managed-node1 python3.9[48841]: ansible-file Invoked with path=/home/user1/.config/containers/registries.conf.d state=directory owner=user1 group=user1 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:52 managed-node1 python3.9[48990]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:52 managed-node1 python3.9[49065]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf _original_basename=.t7udz4o_ recurse=False state=file path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node1 python3.9[49214]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:53 managed-node1 python3.9[49363]: ansible-ansible.legacy.stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:13:53 managed-node1 python3.9[49438]: ansible-ansible.legacy.file Invoked with owner=user1 group=user1 mode=0644 dest=/home/user1/.config/containers/storage.conf _original_basename=.acx818hv recurse=False state=file path=/home/user1/.config/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:54 managed-node1 python3.9[49587]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:54 managed-node1 python3.9[49736]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:55 managed-node1 python3.9[49887]: ansible-slurp Invoked with path=/home/user1/.config/containers/policy.json src=/home/user1/.config/containers/policy.json\nJul 07 20:13:56 managed-node1 python3.9[50036]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 
20:13:56 managed-node1 python3.9[50187]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:56 managed-node1 python3.9[50337]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:13:57 managed-node1 python3.9[50487]: ansible-file Invoked with path=/home/user1/.config/containers state=directory owner=user1 group=user1 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:13:59 managed-node1 python3.9[50860]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:13:59 managed-node1 python3.9[51011]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:00 managed-node1 python3.9[51161]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g user1 _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:01 managed-node1 python3.9[51311]: ansible-stat Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:01 managed-node1 python3.9[51462]: ansible-stat Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:01 managed-node1 python3.9[51613]: ansible-stat Invoked with path=/home/user1/.config/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:02 managed-node1 python3.9[51764]: ansible-stat Invoked with path=/home/user1/.config/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:03 managed-node1 python3.9[52064]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:04 managed-node1 python3.9[52219]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:05 managed-node1 python3.9[52369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:05 managed-node1 python3.9[52520]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S 
unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:06 managed-node1 python3.9[52669]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:06 managed-node1 python3.9[52789]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933646.0899522-14266-164202167319133/.source.conf _original_basename=.f55n9smu follow=False checksum=b1776092f2908d76e11fd6af87267469b2c17d5a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:07 managed-node1 python3.9[52938]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:07 managed-node1 python3.9[53087]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:07 managed-node1 python3.9[53207]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/registries.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933647.2349114-14295-264742941497318/.source.conf _original_basename=.kaocflcp follow=False checksum=fde25488ce7040f1639af7bfc88ed125318cc0b0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node1 python3.9[53356]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:08 managed-node1 python3.9[53505]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:08 managed-node1 python3.9[53627]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/storage.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933648.3819814-14329-124138118490068/.source.conf _original_basename=.cw2ofq3l follow=False checksum=38f015f4780579bd388dd955b42916199fd7fe19 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:09 managed-node1 python3.9[53776]: ansible-file Invoked with 
path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:09 managed-node1 python3.9[53925]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:10 managed-node1 python3.9[54076]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:10 managed-node1 python3.9[54225]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:10 managed-node1 python3.9[54347]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/policy.json owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933650.3431063-14385-203115853451002/.source.json _original_basename=.9ge15xwj follow=False checksum=6746c079ad563b735fc39f73d4876654b80b0a0d backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:11 managed-node1 python3.9[54496]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:12 managed-node1 python3.9[54647]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:14 managed-node1 python3.9[55067]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:15 managed-node1 python3.9[55218]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:15 managed-node1 python3.9[55369]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:16 managed-node1 python3.9[55520]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:16 managed-node1 python3.9[55671]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:18 managed-node1 python3.9[55971]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:19 managed-node1 python3.9[56126]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:20 managed-node1 python3.9[56277]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:21 managed-node1 python3.9[56426]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:21 managed-node1 python3.9[56501]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/containers.conf.d/50-systemroles.conf _original_basename=.05q0dgxv recurse=False state=file path=/etc/containers/containers.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:22 managed-node1 python3.9[56650]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:22 managed-node1 python3.9[56799]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:22 managed-node1 python3.9[56874]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.p8krhe9y recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node1 python3.9[57023]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:23 managed-node1 python3.9[57172]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:24 managed-node1 python3.9[57247]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.41qn4gp5 recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None 
modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:24 managed-node1 python3.9[57396]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:24 managed-node1 python3.9[57545]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:25 managed-node1 python3.9[57696]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:26 managed-node1 python3.9[57845]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:27 managed-node1 python3.9[57996]: ansible-file Invoked with path=/root/.config/containers state=directory owner=root group=0 mode=0700 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:28 managed-node1 python3.9[58369]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:29 managed-node1 python3.9[58520]: ansible-stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58671]: ansible-stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58822]: ansible-stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:30 managed-node1 python3.9[58973]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:31 managed-node1 python3.9[59124]: ansible-slurp Invoked with path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf src=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:32 managed-node1 python3.9[59273]: ansible-slurp Invoked with path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf src=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:14:32 managed-node1 python3.9[59422]: ansible-slurp Invoked with path=/home/user1/.config/containers/storage.conf src=/home/user1/.config/containers/storage.conf\nJul 07 20:14:33 managed-node1 python3.9[59571]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:33 managed-node1 python3.9[59720]: ansible-slurp Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf src=/etc/containers/registries.conf.d/50-systemroles.conf\nJul 07 20:14:34 
managed-node1 python3.9[59869]: ansible-slurp Invoked with path=/etc/containers/storage.conf src=/etc/containers/storage.conf\nJul 07 20:14:35 managed-node1 python3.9[60167]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:37 managed-node1 python3.9[60322]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:38 managed-node1 python3.9[60473]: ansible-file Invoked with path=/etc/containers/containers.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:38 managed-node1 python3.9[60622]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:38 managed-node1 python3.9[60744]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/containers.conf.d/50-systemroles.conf owner=root mode=0644 src=/root/.ansible/tmp/ansible-tmp-1751933678.2505736-15338-238322287102956/.source.conf _original_basename=.8f6zzn7v follow=False checksum=9694c1d1c700a6435eecf4066b052584f4ee94c0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:39 managed-node1 python3.9[60893]: ansible-file Invoked with path=/etc/containers/registries.conf.d state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:39 managed-node1 python3.9[61042]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/registries.conf.d/50-systemroles.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:40 managed-node1 python3.9[61117]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/registries.conf.d/50-systemroles.conf _original_basename=.ttqitrgk recurse=False state=file path=/etc/containers/registries.conf.d/50-systemroles.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:40 managed-node1 python3.9[61266]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:41 
managed-node1 python3.9[61415]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/storage.conf follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:14:41 managed-node1 python3.9[61490]: ansible-ansible.legacy.file Invoked with owner=root mode=0644 dest=/etc/containers/storage.conf _original_basename=.chl3shxe recurse=False state=file path=/etc/containers/storage.conf force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:41 managed-node1 python3.9[61639]: ansible-file Invoked with path=/etc/containers state=directory owner=root mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:42 managed-node1 python3.9[61788]: ansible-stat Invoked with path=/etc/containers/policy.json follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:42 managed-node1 python3.9[61939]: ansible-slurp Invoked with path=/etc/containers/policy.json src=/etc/containers/policy.json\nJul 07 20:14:43 managed-node1 python3.9[62088]: ansible-slurp Invoked with path=/etc/containers/containers.conf.d/50-systemroles.conf src=/etc/containers/containers.conf.d/50-systemroles.conf\nJul 07 20:14:43 managed-node1 python3.9[62237]: ansible-file Invoked with state=absent path=/etc/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62386]: ansible-file Invoked with state=absent path=/etc/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62535]: ansible-file Invoked with state=absent path=/etc/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:44 managed-node1 python3.9[62684]: ansible-file Invoked with state=absent path=/etc/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[62833]: ansible-file Invoked with state=absent 
path=/home/user1/.config/containers/containers.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[62982]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/registries.conf.d/50-systemroles.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:45 managed-node1 python3.9[63131]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/storage.conf recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63280]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/policy.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63429]: ansible-file Invoked with state=absent path=/root/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:46 managed-node1 python3.9[63578]: ansible-file Invoked with state=absent path=/home/user1/.config/containers/auth.json recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:47 managed-node1 python3.9[63727]: ansible-ansible.legacy.command Invoked with _raw_params=tar xfvpP /tmp/lsr_podman_config_630o0ml1/backup.tar _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:47 managed-node1 python3.9[63877]: ansible-file Invoked with state=absent path=/tmp/lsr_podman_config_630o0ml1 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:14:49 managed-node1 python3.9[64075]: ansible-setup Invoked with gather_subset=['!all', '!min', 'distribution', 
'distribution_major_version', 'distribution_version', 'os_family'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:14:50 managed-node1 python3.9[64226]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:50 managed-node1 python3.9[64375]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:52 managed-node1 python3.9[64673]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:14:53 managed-node1 python3.9[64828]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:14:53 managed-node1 python3.9[64978]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:14:57 managed-node1 python3.9[65178]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:00 managed-node1 python3.9[65353]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:00 managed-node1 python3.9[65502]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:02 managed-node1 python3.9[65800]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:03 managed-node1 python3.9[65955]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:04 managed-node1 python3.9[66105]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:09 managed-node1 python3.9[66305]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d\nJul 07 20:15:10 managed-node1 python3.9[66480]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:10 managed-node1 python3.9[66629]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:12 managed-node1 python3.9[66927]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:13 managed-node1 python3.9[67082]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None\nJul 07 20:15:14 managed-node1 python3.9[67232]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:16 managed-node1 python3.9[67383]: ansible-stat Invoked with 
path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:17 managed-node1 python3.9[67534]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:18 managed-node1 python3.9[67683]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:18 managed-node1 python3.9[67803]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933717.8906143-17219-30430442070937/.source.container dest=/etc/containers/systemd/nopull.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=670d64fc68a9768edb20cad26df2acc703542d85 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:20 managed-node1 python3.9[68101]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:21 managed-node1 python3.9[68256]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:23 managed-node1 python3.9[68407]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:24 managed-node1 podman[68566]: 2025-07-07 20:15:24.692519338 -0400 EDT m=+0.023060903 image pull-error this_is_a_bogus_image:latest short-name resolution enforced but cannot prompt without a TTY\nJul 07 20:15:24 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:25 managed-node1 python3.9[68721]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:25 managed-node1 python3.9[68870]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/bogus.container follow=False get_checksum=True get_size=False 
checksum_algorithm=sha1 get_mime=True get_attributes=True\nJul 07 20:15:25 managed-node1 python3.9[68990]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1751933725.2913904-17409-127642446506455/.source.container dest=/etc/containers/systemd/bogus.container owner=root group=0 mode=0644 follow=False _original_basename=systemd.j2 checksum=1d087e679d135214e8ac9ccaf33b2222916efb7f backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None\nJul 07 20:15:28 managed-node1 python3.9[69288]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:29 managed-node1 python3.9[69443]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:32 managed-node1 python3.9[69594]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:33 managed-node1 python3.9[69745]: ansible-systemd Invoked with name=nopull.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None\nJul 07 20:15:33 managed-node1 python3.9[69895]: ansible-stat Invoked with path=/etc/containers/systemd/nopull.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1\nJul 07 20:15:34 managed-node1 python3.9[70195]: ansible-ansible.legacy.command Invoked with _raw_params=set -x\n set -o pipefail\n exec 1>&2\n #podman volume rm --all\n #podman network prune -f\n podman volume ls\n podman network ls\n podman secret ls\n podman container ls\n podman pod ls\n podman images\n systemctl list-units | grep quadlet\n _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None\nJul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:34 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:35 managed-node1 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully.\n\u2591\u2591 Subject: Unit succeeded\n\u2591\u2591 Defined-By: systemd\n\u2591\u2591 Support: https://access.redhat.com/support\n\u2591\u2591 \n\u2591\u2591 The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state.\nJul 07 20:15:36 managed-node1 python3.9[70540]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None 
creates=None removes=None stdin=None\nJul 07 20:15:37 managed-node1 python3.9[70695]: ansible-getent Invoked with database=passwd key=user_quadlet_basic fail_key=False service=None split=None\nJul 07 20:15:37 managed-node1 python3.9[70845]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None", "task_name": "Dump journal", "task_path": "/tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336" } ] SYSTEM ROLES ERRORS END v1 TASKS RECAP ******************************************************************** Monday 07 July 2025 20:15:37 -0400 (0:00:00.438) 0:00:29.735 *********** =============================================================================== fedora.linux_system_roles.podman : Gather the package facts ------------- 1.18s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Gathering Facts --------------------------------------------------------- 1.09s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:9 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.87s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.81s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Gather the package facts ------------- 0.80s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.79s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 fedora.linux_system_roles.podman : Stop and disable service ------------- 0.70s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 fedora.linux_system_roles.podman : Ensure quadlet file is present ------- 0.68s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:75 fedora.linux_system_roles.podman : Ensure container images are present --- 0.58s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 fedora.linux_system_roles.podman : Get user information ----------------- 0.56s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 fedora.linux_system_roles.podman : Ensure the quadlet directory is present --- 0.55s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Debug3 ------------------------------------------------------------------ 0.54s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:270 fedora.linux_system_roles.podman : Check if system is ostree ------------ 0.52s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 fedora.linux_system_roles.podman : Get podman version ------------------- 0.50s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 
fedora.linux_system_roles.podman : Get podman version ------------------- 0.44s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Dump journal ------------------------------------------------------------ 0.44s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_basic.yml:336 fedora.linux_system_roles.podman : Slurp quadlet file ------------------- 0.43s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 fedora.linux_system_roles.podman : See if getsubids exists -------------- 0.40s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 fedora.linux_system_roles.podman : Get podman version ------------------- 0.39s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin --- 0.38s /tmp/collections-AV4/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23