[WARNING]: Collection infra.leapp does not support Ansible version 2.14.18
[WARNING]: running playbook inside collection infra.leapp
ansible-playbook [core 2.14.18]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.9/site-packages/ansible
  ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
  executable location = /usr/bin/ansible-playbook
  python version = 3.9.25 (main, Nov 10 2025, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-11)] (/usr/bin/python3)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_default.yml ****************************************************
1 plays in /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml

PLAY [Test] ********************************************************************

TASK [Gathering Facts] *********************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:2
ok: [managed-node01]

TASK [Test | Run role upgrade] *************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:10

TASK [infra.leapp.common : Log directory exists] *******************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:3
ok: [managed-node01] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/var/log/ripu", "secontext": "unconfined_u:object_r:var_log_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [infra.leapp.common : Check for existing log file] ************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:11
ok: [managed-node01] => {"changed": false, "stat": {"exists": false}}

TASK [infra.leapp.common : Fail if log file already exists] ********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:16
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : Create new log file] ********************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:23
NOTIFIED HANDLER infra.leapp.common : Check for log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Add end time to log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Slurp ripu.log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Decode ripu.log file for managed-node01
NOTIFIED HANDLER infra.leapp.common : Rename log file for managed-node01
changed: [managed-node01] => {"changed": true, "checksum": "c0670840e62d737ff4b23aad958433105c1216eb", "dest": "/var/log/ripu/ripu.log", "gid": 0, "group": "root", "md5sum": "6bb74004442d15c570dcd3d6a4762be8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 61, "src": "/root/.ansible/tmp/ansible-tmp-1766141636.92165-6312-131865250831128/source", "state": "file", "uid": 0}

TASK [infra.leapp.common :
/etc/ansible/facts.d directory exists] ************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:35 ok: [managed-node01] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/ansible/facts.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 57, "state": "directory", "uid": 0} TASK [infra.leapp.common : Capture current ansible_facts for validation after upgrade] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:43 changed: [managed-node01] => {"changed": true, "checksum": "92e154011a4e7d781ed331b5a923b6ee36d57d36", "dest": "/etc/ansible/facts.d/pre_ripu.fact", "gid": 0, "group": "root", "md5sum": "5b4e8395ac0c1fdb7c5fb1f03e7f10dc", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 11998, "src": "/root/.ansible/tmp/ansible-tmp-1766141637.7898479-6340-107191548224713/source", "state": "file", "uid": 0} TASK [infra.leapp.common : Capture a list of non-rhel versioned packages] ****** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:51 ok: [managed-node01] => {"changed": false, "cmd": "set -o pipefail; export PATH=$PATH; rpm -qa | grep -ve '[\\.|+]el7' | grep -vE '^(gpg-pubkey|libmodulemd|katello-ca-consumer)' | sort", "delta": "0:00:00.386853", "end": "2025-12-19 05:53:58.941733", "failed_when_result": false, "msg": "", "rc": 0, "start": "2025-12-19 05:53:58.554880", "stderr": "", "stderr_lines": [], "stdout": "epel-release-7-14.noarch\ntps-devel-2.44.50-1.noarch", "stdout_lines": ["epel-release-7-14.noarch", "tps-devel-2.44.50-1.noarch"]} TASK [infra.leapp.common : Create fact with the non-rhel versioned packages list] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:65 ok: [managed-node01] => {"ansible_facts": {"non_rhel_packages": ["epel-release-7-14.noarch", "tps-devel-2.44.50-1.noarch"]}, "changed": false} TASK [infra.leapp.common : Capture the list of non-rhel versioned packages in a separate fact file] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/main.yml:69 ok: [managed-node01] => {"changed": false, "checksum": "6d36b22d9c2b2f366fc090edfbac427c77d524a5", "dest": "/etc/ansible/facts.d/non_rhel_packages.fact", "gid": 0, "group": "root", "mode": "0644", "owner": "root", "path": "/etc/ansible/facts.d/non_rhel_packages.fact", "secontext": "system_u:object_r:etc_t:s0", "size": 58, "state": "file", "uid": 0} TASK [infra.leapp.upgrade : Include tasks for upgrade using redhat-upgrade-tool] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/main.yml:9 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : Include tasks for leapp upgrade] ******************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/main.yml:13 [WARNING]: Collection community.general does not support Ansible version 2.14.18 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml for managed-node01 TASK [leapp-upgrade | Run parse_leapp_report to check for inhibitors] ********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:2 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK 
[infra.leapp.upgrade : leapp-upgrade | Verify no inhibitor results found during preupgrade] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:8 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : leapp-upgrade | Register to leapp activation key] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:14 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [leapp-upgrade | Include custom_local_repos for local_repos_pre_leapp] **** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:25 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : leapp-upgrade | Install packages for upgrade from RHEL 7] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:33 ok: [managed-node01] => {"changed": false, "changes": {"installed": [], "updated": []}, "msg": "", "rc": 0, "results": ["All packages providing leapp-upgrade are up to date", ""]} TASK [infra.leapp.upgrade : leapp-upgrade | Install packages for upgrade from RHEL 8] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:40 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : leapp-upgrade | Install packages for upgrade from RHEL 9] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:47 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : leapp-upgrade | Include update-and-reboot.yml] ***** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:54 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/update-and-reboot.yml for managed-node01 TASK [infra.leapp.upgrade : update-and-reboot | Ensure all updates are applied] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/update-and-reboot.yml:2 ASYNC POLL on managed-node01: jid=j177068004437.10810 started=1 finished=0 ASYNC OK on managed-node01: jid=j177068004437.10810 changed: [managed-node01] => {"ansible_job_id": "j177068004437.10810", "changed": true, "changes": {"installed": [], "updated": [["kernel", "3.10.0-1160.120.1.el7.x86_64 from rhel"], ["kernel-tools-libs", "3.10.0-1160.120.1.el7.x86_64 from rhel"], ["kernel-tools", "3.10.0-1160.120.1.el7.x86_64 from rhel"], ["python-perf", "3.10.0-1160.120.1.el7.x86_64 from rhel"]]}, "finished": 1, "msg": "", "rc": 0, "results": ["Loaded plugins: product-id, search-disabled-repos, subscription-manager\n\nThis system is not registered with an entitlement server. 
You can use subscription-manager to register.\n\nResolving Dependencies\n--> Running transaction check\n---> Package kernel.x86_64 0:3.10.0-1160.120.1.el7 will be installed\n---> Package kernel-tools.x86_64 0:3.10.0-1160.119.1.el7 will be updated\n---> Package kernel-tools.x86_64 0:3.10.0-1160.120.1.el7 will be an update\n---> Package kernel-tools-libs.x86_64 0:3.10.0-1160.119.1.el7 will be updated\n---> Package kernel-tools-libs.x86_64 0:3.10.0-1160.120.1.el7 will be an update\n---> Package python-perf.x86_64 0:3.10.0-1160.119.1.el7 will be updated\n---> Package python-perf.x86_64 0:3.10.0-1160.120.1.el7 will be an update\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository\n Size\n================================================================================\nInstalling:\n kernel x86_64 3.10.0-1160.120.1.el7 rhel 52 M\nUpdating:\n kernel-tools x86_64 3.10.0-1160.120.1.el7 rhel 8.2 M\n kernel-tools-libs x86_64 3.10.0-1160.120.1.el7 rhel 8.1 M\n python-perf x86_64 3.10.0-1160.120.1.el7 rhel 8.2 M\n\nTransaction Summary\n================================================================================\nInstall 1 Package\nUpgrade 3 Packages\n\nTotal download size: 76 M\nDownloading packages:\nDelta RPMs disabled because /usr/bin/applydeltarpm not installed.\n--------------------------------------------------------------------------------\nTotal 48 MB/s | 76 MB 00:01 \nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Updating : kernel-tools-libs-3.10.0-1160.120.1.el7.x86_64 1/7 \n Updating : kernel-tools-3.10.0-1160.120.1.el7.x86_64 2/7 \n Installing : kernel-3.10.0-1160.120.1.el7.x86_64 3/7 \n Updating : python-perf-3.10.0-1160.120.1.el7.x86_64 4/7 \n Cleanup : kernel-tools-3.10.0-1160.119.1.el7.x86_64 5/7 \n Cleanup : kernel-tools-libs-3.10.0-1160.119.1.el7.x86_64 6/7 \n Cleanup : python-perf-3.10.0-1160.119.1.el7.x86_64 7/7 \n Verifying : kernel-tools-libs-3.10.0-1160.120.1.el7.x86_64 1/7 \n Verifying : kernel-tools-3.10.0-1160.120.1.el7.x86_64 2/7 \n Verifying : python-perf-3.10.0-1160.120.1.el7.x86_64 3/7 \n Verifying : kernel-3.10.0-1160.120.1.el7.x86_64 4/7 \n Verifying : python-perf-3.10.0-1160.119.1.el7.x86_64 5/7 \n Verifying : kernel-tools-libs-3.10.0-1160.119.1.el7.x86_64 6/7 \n Verifying : kernel-tools-3.10.0-1160.119.1.el7.x86_64 7/7 \n\nInstalled:\n kernel.x86_64 0:3.10.0-1160.120.1.el7 \n\nUpdated:\n kernel-tools.x86_64 0:3.10.0-1160.120.1.el7 \n kernel-tools-libs.x86_64 0:3.10.0-1160.120.1.el7 \n python-perf.x86_64 0:3.10.0-1160.120.1.el7 \n\nComplete!\n"], "results_file": "/root/.ansible_async/j177068004437.10810", "started": 1, "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []} TASK [infra.leapp.upgrade : update-and-reboot | Reboot when updates applied] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/update-and-reboot.yml:10 changed: [managed-node01] => {"changed": true, "elapsed": 120, "rebooted": true} TASK [leapp-upgrade | Create /etc/leapp/files/leapp_upgrade_repositories.repo] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:58 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.upgrade : leapp-upgrade | Include disable-previous-repo-files.yml] *** task path: 
/root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:69 skipping: [managed-node01] => {"changed": false, "skipped_reason": "No items in the list"} TASK [infra.leapp.upgrade : leapp-upgrade | Include remove-kernel-modules.yml] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:75 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/remove-kernel-modules.yml for managed-node01 TASK [infra.leapp.upgrade : remove-kernel-modules | Get names of loaded kernel modules] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/remove-kernel-modules.yml:2 ok: [managed-node01] => {"changed": false, "cmd": ["awk", "{print $1}", "/proc/modules"], "delta": "0:00:00.004464", "end": "2025-12-19 05:58:04.354295", "msg": "", "rc": 0, "start": "2025-12-19 05:58:04.349831", "stderr": "", "stderr_lines": [], "stdout": "sb_edac\niosf_mbi\ncrc32_pclmul\nghash_clmulni_intel\nppdev\naesni_intel\nlrw\ngf128mul\nglue_helper\nablk_helper\ncryptd\ni2c_piix4\npcspkr\nparport_pc\nparport\nsunrpc\nip_tables\nxfs\nlibcrc32c\nata_generic\npata_acpi\ncirrus\ndrm_kms_helper\nsyscopyarea\nsysfillrect\nsysimgblt\nfb_sys_fops\nttm\ndrm\nata_piix\nlibata\ncrct10dif_pclmul\nxen_blkfront\ncrct10dif_common\nxen_netfront\ncrc32c_intel\nserio_raw\ndrm_panel_orientation_quirks", "stdout_lines": ["sb_edac", "iosf_mbi", "crc32_pclmul", "ghash_clmulni_intel", "ppdev", "aesni_intel", "lrw", "gf128mul", "glue_helper", "ablk_helper", "cryptd", "i2c_piix4", "pcspkr", "parport_pc", "parport", "sunrpc", "ip_tables", "xfs", "libcrc32c", "ata_generic", "pata_acpi", "cirrus", "drm_kms_helper", "syscopyarea", "sysfillrect", "sysimgblt", "fb_sys_fops", "ttm", "drm", "ata_piix", "libata", "crct10dif_pclmul", "xen_blkfront", "crct10dif_common", "xen_netfront", "crc32c_intel", "serio_raw", "drm_panel_orientation_quirks"]} TASK [infra.leapp.upgrade : remove-kernel-modules | Remove specified kernel modules] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/remove-kernel-modules.yml:8 skipping: [managed-node01] => {"changed": false, "skipped_reason": "No items in the list"} TASK [infra.leapp.upgrade : leapp-upgrade | Start Leapp OS upgrade] ************ task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:82 ASYNC FAILED on managed-node01: jid=j558324938336.7944 fatal: [managed-node01]: FAILED! 
=> {"ansible_job_id": "j558324938336.7944", "changed": true, "cmd": "set -o pipefail; export PATH=$PATH; ulimit -n 16384; leapp upgrade --report-schema=1.2.0 2>&1 | tee -a /var/log/ripu/ripu.log\n", "delta": "0:00:37.327427", "end": "2025-12-19 05:58:42.136320", "finished": 1, "msg": "non-zero return code", "rc": 1, "results_file": "/root/.ansible_async/j558324938336.7944", "start": "2025-12-19 05:58:04.808893", "started": 1, "stderr": "", "stderr_lines": [], "stdout": "==> Processing phase `configuration_phase`\n====> * ipu_workflow_config\n IPU workflow config actor\n==> Processing phase `FactsCollection`\n====> * transaction_workarounds\n Provides additional RPM transaction tasks based on bundled RPM packages.\n====> * source_boot_loader_scanner\n Scans the boot loader configuration on the source system.\n====> * authselect_scanner\n Detect what authselect configuration should be suggested to administrator.\n====> * scan_files_for_target_userspace\n Scan the source system and identify files that will be copied into the target userspace when it is created.\n====> * scan_kernel_cmdline\n No documentation has been provided for the scan_kernel_cmdline actor.\n====> * tcp_wrappers_config_read\n Parse tcp_wrappers configuration files /etc/hosts.{allow,deny}.\n====> * repository_mapping\n Produces message containing repository mapping based on provided file.\n====> * scan_source_files\n Scan files (explicitly specified) of the source system.\n====> * root_scanner\n Scan the system root directory and produce a message containing\n====> * common_leapp_dracut_modules\n Influences the generation of the initram disk\n====> * scanclienablerepo\n Produce CustomTargetRepository based on the LEAPP_ENABLE_REPOS in config.\n====> * rpm_scanner\n Provides data about installed RPM Packages.\nLoaded plugins: product-id, subscription-manager\n\nThis system is not registered with an entitlement server. 
You can use subscription-manager to register.\n\n====> * scan_grub_config\n Scan grub configuration files for errors.\n====> * scan_custom_repofile\n Scan the custom /etc/leapp/files/leapp_upgrade_repositories.repo repo file.\n====> * scan_custom_modifications_actor\n Collects information about files in leapp directories that have been modified or newly added.\n====> * scanmemory\n Scan Memory of the machine.\n====> * checkrhui\n Check if system is using RHUI infrastructure (on public cloud) and send messages to\n====> * system_facts\n Provides data about many facts from system.\n====> * sssd_facts\n Check SSSD configuration for changes in RHEL8 and report them in model.\n====> * load_device_driver_deprecation_data\n Loads deprecation data for drivers and devices (PCI & CPU)\n====> * pam_modules_scanner\n Scan the pam directory for services and modules used in them\n====> * read_openssh_config\n Collect information about the OpenSSH configuration.\n====> * scan_fips\n Determine whether the source system has FIPS enabled.\n====> * firewalld_facts_actor\n Provide data about firewalld\n====> * selinuxcontentscanner\n Scan the system for any SELinux customizations\n====> * pci_devices_scanner\n Provides data about existing PCI Devices.\n====> * copy_dnf_conf_into_target_userspace\n Copy dnf.conf into target userspace\n====> * persistentnetnames\n Get network interface information for physical ethernet interfaces of the original system.\n====> * distribution_signed_rpm_scanner\n Provide data about distribution signed & unsigned RPM packages.\n====> * removed_pam_modules_scanner\n Scan PAM configuration for modules that are not available in RHEL-8.\n====> * network_manager_read_config\n Provides data about NetworkManager configuration.\n====> * satellite_upgrade_facts\n Report which Satellite packages require updates and how to handle PostgreSQL data\n====> * scan_grub_device_name\n Find the name of the block devices where GRUB is located\n====> * cups_scanner\n Gather facts about CUPS features which needs to be migrated\n====> * get_installed_desktops\n Actor checks if kde or gnome desktop environments\n====> * udevadm_info\n Produces data exported by the \"udevadm info\" command.\n====> * register_yum_adjustment\n Registers a workaround which will adjust the yum directories during the upgrade.\n====> * persistentnetnamesdisable\n Disable systemd-udevd persistent network naming on machine with single eth0 NIC\n====> * scanzfcp\n In case of s390x architecture, check whether ZFCP is used.\n====> * check_kde_apps\n Actor checks which KDE apps are installed.\n====> * scan_pkg_manager\n Provides data about package manager (yum/dnf)\n====> * scan_systemd_source\n Provides info about systemd on the source system\n====> * scan_sap_hana\n Gathers information related to SAP HANA instances on the system.\n====> * scandasd\n In case of s390x architecture, check whether DASD is used.\n====> * scan_source_kernel\n Scan the source system kernel.\n====> * storage_scanner\n Provides data about storage settings.\n====> * multipath_conf_read\n Read multipath configuration files and extract the necessary information\n====> * check_grub_legacy\n Check whether GRUB Legacy is installed in the MBR.\n====> * trusted_gpg_keys_scanner\n Scan for trusted GPG keys.\n====> * remove_obsolete_gpg_keys\n Remove obsoleted RPM GPG keys.\n====> * scan_target_os_image\n Scans the provided target OS ISO image to use as a content source for the IPU, if any.\n====> * get_enabled_modules\n Provides data about which module streams are 
enabled on the source system.\n====> * scan_subscription_manager_info\n Scans the current system for subscription manager information\n====> * scancpu\n Scan CPUs of the machine.\n====> * repositories_blacklist\n Exclude target repositories provided by Red Hat without support.\n====> * used_repository_scanner\n Scan used enabled repositories\n====> * quagga_daemons\n Active quagga daemons check.\n====> * rpm_transaction_config_tasks_collector\n Provides additional RPM transaction tasks from /etc/leapp/transaction.\n====> * sctp_read_status\n Determines whether or not the SCTP kernel module might be wanted.\n====> * vsftpd_config_read\n Reads vsftpd configuration files (/etc/vsftpd/*.conf) and extracts necessary information.\n====> * ipa_scanner\n Scan system for ipa-client and ipa-server status\n====> * spamassassin_config_read\n Reads spamc configuration (/etc/mail/spamassassin/spamc.conf), the\n====> * biosdevname\n Enable biosdevname on the target RHEL system if all interfaces on the source RHEL\n====> * scan_grub_device_partition_layout\n Scan all identified GRUB devices for their partition layout.\n====> * detect_kernel_drivers\n Matches all currently loaded kernel drivers against known deprecated and removed drivers.\n====> * scan_dynamic_linker_configuration\n Scan the dynamic linker configuration and find modifications.\n====> * xfs_info_scanner\n This actor scans all mounted mountpoints for XFS information\n====> * pes_events_scanner\n Provides data about package events from Package Evolution Service.\n====> * setuptargetrepos\n Produces list of repositories that should be available to be used by Upgrade process.\n\n============================================================\n ERRORS \n============================================================\n\n2025-12-19 05:58:36.676429 [ERROR] Actor: scan_source_kernel\nMessage: Unable to identify package providing the booted kernel.\n2025-12-19 05:58:38.893089 [ERROR] Actor: scan_subscription_manager_info\nMessage: A subscription-manager command failed to execute\nSummary:\n Link: https://access.redhat.com/solutions/6138372\n Details: Command ['subscription-manager', 'release'] failed with exit code 1.\n Stderr: This system is not yet registered. Try 'subscription-manager register --help' for more information.\n Hint: Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\n\n============================================================\n END OF ERRORS \n============================================================\n\nDebug output written to /var/log/leapp/leapp-upgrade.log\n\n============================================================\n REPORT OVERVIEW \n============================================================\n\nFollowing errors occurred and the upgrade cannot continue:\n 1. Actor: scan_source_kernel\n Message: Unable to identify package providing the booted kernel.\n 2. Actor: scan_subscription_manager_info\n Message: A subscription-manager command failed to execute\n\nHIGH and MEDIUM severity reports:\n 1. 
Packages available in excluded repositories will not be installed\n\nReports summary:\n Errors: 2\n Inhibitors: 0\n HIGH severity reports: 1\n MEDIUM severity reports: 0\n LOW severity reports: 0\n INFO severity reports: 1\n\nBefore continuing, review the full report below for details about discovered problems and possible remediation instructions:\n A report has been generated at /var/log/leapp/leapp-report.txt\n A report has been generated at /var/log/leapp/leapp-report.json\n\n============================================================\n END OF REPORT OVERVIEW \n============================================================\n\nAnswerfile has been generated at /var/log/leapp/answerfile", "stdout_lines": ["==> Processing phase `configuration_phase`", "====> * ipu_workflow_config", " IPU workflow config actor", "==> Processing phase `FactsCollection`", "====> * transaction_workarounds", " Provides additional RPM transaction tasks based on bundled RPM packages.", "====> * source_boot_loader_scanner", " Scans the boot loader configuration on the source system.", "====> * authselect_scanner", " Detect what authselect configuration should be suggested to administrator.", "====> * scan_files_for_target_userspace", " Scan the source system and identify files that will be copied into the target userspace when it is created.", "====> * scan_kernel_cmdline", " No documentation has been provided for the scan_kernel_cmdline actor.", "====> * tcp_wrappers_config_read", " Parse tcp_wrappers configuration files /etc/hosts.{allow,deny}.", "====> * repository_mapping", " Produces message containing repository mapping based on provided file.", "====> * scan_source_files", " Scan files (explicitly specified) of the source system.", "====> * root_scanner", " Scan the system root directory and produce a message containing", "====> * common_leapp_dracut_modules", " Influences the generation of the initram disk", "====> * scanclienablerepo", " Produce CustomTargetRepository based on the LEAPP_ENABLE_REPOS in config.", "====> * rpm_scanner", " Provides data about installed RPM Packages.", "Loaded plugins: product-id, subscription-manager", "", "This system is not registered with an entitlement server. 
You can use subscription-manager to register.", "", "====> * scan_grub_config", " Scan grub configuration files for errors.", "====> * scan_custom_repofile", " Scan the custom /etc/leapp/files/leapp_upgrade_repositories.repo repo file.", "====> * scan_custom_modifications_actor", " Collects information about files in leapp directories that have been modified or newly added.", "====> * scanmemory", " Scan Memory of the machine.", "====> * checkrhui", " Check if system is using RHUI infrastructure (on public cloud) and send messages to", "====> * system_facts", " Provides data about many facts from system.", "====> * sssd_facts", " Check SSSD configuration for changes in RHEL8 and report them in model.", "====> * load_device_driver_deprecation_data", " Loads deprecation data for drivers and devices (PCI & CPU)", "====> * pam_modules_scanner", " Scan the pam directory for services and modules used in them", "====> * read_openssh_config", " Collect information about the OpenSSH configuration.", "====> * scan_fips", " Determine whether the source system has FIPS enabled.", "====> * firewalld_facts_actor", " Provide data about firewalld", "====> * selinuxcontentscanner", " Scan the system for any SELinux customizations", "====> * pci_devices_scanner", " Provides data about existing PCI Devices.", "====> * copy_dnf_conf_into_target_userspace", " Copy dnf.conf into target userspace", "====> * persistentnetnames", " Get network interface information for physical ethernet interfaces of the original system.", "====> * distribution_signed_rpm_scanner", " Provide data about distribution signed & unsigned RPM packages.", "====> * removed_pam_modules_scanner", " Scan PAM configuration for modules that are not available in RHEL-8.", "====> * network_manager_read_config", " Provides data about NetworkManager configuration.", "====> * satellite_upgrade_facts", " Report which Satellite packages require updates and how to handle PostgreSQL data", "====> * scan_grub_device_name", " Find the name of the block devices where GRUB is located", "====> * cups_scanner", " Gather facts about CUPS features which needs to be migrated", "====> * get_installed_desktops", " Actor checks if kde or gnome desktop environments", "====> * udevadm_info", " Produces data exported by the \"udevadm info\" command.", "====> * register_yum_adjustment", " Registers a workaround which will adjust the yum directories during the upgrade.", "====> * persistentnetnamesdisable", " Disable systemd-udevd persistent network naming on machine with single eth0 NIC", "====> * scanzfcp", " In case of s390x architecture, check whether ZFCP is used.", "====> * check_kde_apps", " Actor checks which KDE apps are installed.", "====> * scan_pkg_manager", " Provides data about package manager (yum/dnf)", "====> * scan_systemd_source", " Provides info about systemd on the source system", "====> * scan_sap_hana", " Gathers information related to SAP HANA instances on the system.", "====> * scandasd", " In case of s390x architecture, check whether DASD is used.", "====> * scan_source_kernel", " Scan the source system kernel.", "====> * storage_scanner", " Provides data about storage settings.", "====> * multipath_conf_read", " Read multipath configuration files and extract the necessary information", "====> * check_grub_legacy", " Check whether GRUB Legacy is installed in the MBR.", "====> * trusted_gpg_keys_scanner", " Scan for trusted GPG keys.", "====> * remove_obsolete_gpg_keys", " Remove obsoleted RPM GPG keys.", "====> * scan_target_os_image", " Scans 
the provided target OS ISO image to use as a content source for the IPU, if any.", "====> * get_enabled_modules", " Provides data about which module streams are enabled on the source system.", "====> * scan_subscription_manager_info", " Scans the current system for subscription manager information", "====> * scancpu", " Scan CPUs of the machine.", "====> * repositories_blacklist", " Exclude target repositories provided by Red Hat without support.", "====> * used_repository_scanner", " Scan used enabled repositories", "====> * quagga_daemons", " Active quagga daemons check.", "====> * rpm_transaction_config_tasks_collector", " Provides additional RPM transaction tasks from /etc/leapp/transaction.", "====> * sctp_read_status", " Determines whether or not the SCTP kernel module might be wanted.", "====> * vsftpd_config_read", " Reads vsftpd configuration files (/etc/vsftpd/*.conf) and extracts necessary information.", "====> * ipa_scanner", " Scan system for ipa-client and ipa-server status", "====> * spamassassin_config_read", " Reads spamc configuration (/etc/mail/spamassassin/spamc.conf), the", "====> * biosdevname", " Enable biosdevname on the target RHEL system if all interfaces on the source RHEL", "====> * scan_grub_device_partition_layout", " Scan all identified GRUB devices for their partition layout.", "====> * detect_kernel_drivers", " Matches all currently loaded kernel drivers against known deprecated and removed drivers.", "====> * scan_dynamic_linker_configuration", " Scan the dynamic linker configuration and find modifications.", "====> * xfs_info_scanner", " This actor scans all mounted mountpoints for XFS information", "====> * pes_events_scanner", " Provides data about package events from Package Evolution Service.", "====> * setuptargetrepos", " Produces list of repositories that should be available to be used by Upgrade process.", "", "============================================================", " ERRORS ", "============================================================", "", "2025-12-19 05:58:36.676429 [ERROR] Actor: scan_source_kernel", "Message: Unable to identify package providing the booted kernel.", "2025-12-19 05:58:38.893089 [ERROR] Actor: scan_subscription_manager_info", "Message: A subscription-manager command failed to execute", "Summary:", " Link: https://access.redhat.com/solutions/6138372", " Details: Command ['subscription-manager', 'release'] failed with exit code 1.", " Stderr: This system is not yet registered. Try 'subscription-manager register --help' for more information.", " Hint: Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.", "", "============================================================", " END OF ERRORS ", "============================================================", "", "Debug output written to /var/log/leapp/leapp-upgrade.log", "", "============================================================", " REPORT OVERVIEW ", "============================================================", "", "Following errors occurred and the upgrade cannot continue:", " 1. Actor: scan_source_kernel", " Message: Unable to identify package providing the booted kernel.", " 2. 
Actor: scan_subscription_manager_info", " Message: A subscription-manager command failed to execute", "", "HIGH and MEDIUM severity reports:", " 1. Packages available in excluded repositories will not be installed", "", "Reports summary:", " Errors: 2", " Inhibitors: 0", " HIGH severity reports: 1", " MEDIUM severity reports: 0", " LOW severity reports: 0", " INFO severity reports: 1", "", "Before continuing, review the full report below for details about discovered problems and possible remediation instructions:", " A report has been generated at /var/log/leapp/leapp-report.txt", " A report has been generated at /var/log/leapp/leapp-report.json", "", "============================================================", " END OF REPORT OVERVIEW ", "============================================================", "", "Answerfile has been generated at /var/log/leapp/answerfile"]} TASK [leapp-upgrade | Run parse_leapp_report to check for inhibitors] ********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:98 TASK [infra.leapp.common : parse_leapp_report | Default upgrade_inhibited to false] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:12 ok: [managed-node01] => {"ansible_facts": {"upgrade_inhibited": false}, "changed": false} TASK [infra.leapp.common : parse_leapp_report | Collect human readable report results] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:16 ok: [managed-node01] => {"changed": false, "content": "UmlzayBGYWN0b3I6IGhpZ2ggKGVycm9yKQpUaXRsZTogVW5hYmxlIHRvIGlkZW50aWZ5IHBhY2thZ2UgcHJvdmlkaW5nIHRoZSBib290ZWQga2VybmVsLgpTdW1tYXJ5OiAKS2V5OiA0NmY3NmY2MTI0YjU1MzdlNWRmZDBlNmMyNTBkM2Y0MmM4YTk3NjkxCi0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0KUmlzayBGYWN0b3I6IGhpZ2ggKGVycm9yKQpUaXRsZTogQSBzdWJzY3JpcHRpb24tbWFuYWdlciBjb21tYW5kIGZhaWxlZCB0byBleGVjdXRlClN1bW1hcnk6IHsibGluayI6ICJodHRwczovL2FjY2Vzcy5yZWRoYXQuY29tL3NvbHV0aW9ucy82MTM4MzcyIiwgImRldGFpbHMiOiAiQ29tbWFuZCBbJ3N1YnNjcmlwdGlvbi1tYW5hZ2VyJywgJ3JlbGVhc2UnXSBmYWlsZWQgd2l0aCBleGl0IGNvZGUgMS4iLCAic3RkZXJyIjogIlRoaXMgc3lzdGVtIGlzIG5vdCB5ZXQgcmVnaXN0ZXJlZC4gVHJ5ICdzdWJzY3JpcHRpb24tbWFuYWdlciByZWdpc3RlciAtLWhlbHAnIGZvciBtb3JlIGluZm9ybWF0aW9uLlxuIiwgImhpbnQiOiAiUGxlYXNlIGVuc3VyZSB5b3UgaGF2ZSBhIHZhbGlkIFJIRUwgc3Vic2NyaXB0aW9uIGFuZCB5b3VyIG5ldHdvcmsgaXMgdXAuIElmIHlvdSBhcmUgdXNpbmcgcHJveHkgZm9yIFJlZCBIYXQgc3Vic2NyaXB0aW9uLW1hbmFnZXIsIHBsZWFzZSBtYWtlIHN1cmUgaXQgaXMgc3BlY2lmaWVkIGluc2lkZSB0aGUgL2V0Yy9yaHNtL3Joc20uY29uZiBmaWxlLiBPciB1c2UgdGhlIC0tbm8tcmhzbSBvcHRpb24gd2hlbiBydW5uaW5nIGxlYXBwLCBpZiB5b3UgZG8gbm90IHdhbnQgdG8gdXNlIHN1YnNjcmlwdGlvbi1tYW5hZ2VyIGZvciB0aGUgaW4tcGxhY2UgdXBncmFkZSBhbmQgeW91IHdhbnQgdG8gZGVsaXZlciBhbGwgdGFyZ2V0IHJlcG9zaXRvcmllcyBieSB5b3Vyc2VsZiBvciB1c2luZyBSSFVJIG9uIHB1YmxpYyBjbG91ZC4ifQpLZXk6IDdlYzgyNjk3ODRkYjFiYmEyYWM1NGFlNDM4Njg5ZWYzOTdlMTY4MzMKLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQpSaXNrIEZhY3RvcjogaGlnaCAKVGl0bGU6IFBhY2thZ2VzIGF2YWlsYWJsZSBpbiBleGNsdWRlZCByZXBvc2l0b3JpZXMgd2lsbCBub3QgYmUgaW5zdGFsbGVkClN1bW1hcnk6IDIgcGFja2FnZXMgd2lsbCBiZSBza2lwcGVkIGJlY2F1c2UgdGhleSBhcmUgYXZhaWxhYmxlIG9ubHkgaW4gdGFyZ2V0IHN5c3RlbSByZXBvc2l0b3JpZXMgdGhhdCBhcmUgaW50ZW50aW9uYWxseSBleGNsdWRlZCBmcm9tIHRoZSBsaXN0IG9mIHJlcG9zaXRvcmllcyB1c2VkIGR1cmluZyB0aGUgdXBncmFkZS4gU2VlIHRoZSByZXBvcnQgbWVzc2FnZSB0aXRsZWQgIkV4Y2x1ZGVkIHRhcmdldCBzeXN0ZW0gcmVwb3NpdG9yaWVzIiBmb3IgZGV0YWlscy4KVGhlIGxpc3Qgb2YgdGhlc2UgcGFja2FnZXM6Ci0gcHl0aG9uMy1weXhhdHRyIC
hyZXBvaWQ6IGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgteDg2XzY0LXJwbXMpCi0gcnBjZ2VuIChyZXBvaWQ6IGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgteDg2XzY0LXJwbXMpCktleTogMjQzN2UyMDQ4MDhmOTg3NDc3YzBlOWJlOGU0Yzk1YjNhODdhOWYzZQotLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tClJpc2sgRmFjdG9yOiBpbmZvIApUaXRsZTogRXhjbHVkZWQgdGFyZ2V0IHN5c3RlbSByZXBvc2l0b3JpZXMKU3VtbWFyeTogVGhlIGZvbGxvd2luZyByZXBvc2l0b3JpZXMgYXJlIG5vdCBzdXBwb3J0ZWQgYnkgUmVkIEhhdCBhbmQgYXJlIGV4Y2x1ZGVkIGZyb20gdGhlIGxpc3Qgb2YgcmVwb3NpdG9yaWVzIHVzZWQgZHVyaW5nIHRoZSB1cGdyYWRlLgotIGNvZGVyZWFkeS1idWlsZGVyLWJldGEtZm9yLXJoZWwtOC1zMzkweC1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItYmV0YS1mb3ItcmhlbC04LXBwYzY0bGUtcnBtcwotIHJodWktY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtcmh1aS1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1hYXJjaDY0LWV1cy1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1wcGM2NGxlLWV1cy1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItYmV0YS1mb3ItcmhlbC04LXg4Nl82NC1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1hYXJjaDY0LXJwbXMKLSBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXMzOTB4LXJwbXMKLSBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXMzOTB4LWV1cy1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtZXVzLXJwbXMKLSByaHVpLWNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgtYWFyY2g2NC1yaHVpLXJwbXMKLSBjb2RlcmVhZHktYnVpbGRlci1iZXRhLWZvci1yaGVsLTgtYWFyY2g2NC1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1yaHVpLXJwbXMKLSBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXg4Nl82NC1yaHVpLXJwbXMKLSBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXg4Nl82NC1ycG1zCi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtZXVzLXJodWktcnBtcwotIGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgtcHBjNjRsZS1ycG1zClJlbWVkaWF0aW9uOiBbaGludF0gSWYgc29tZSBvZiBleGNsdWRlZCByZXBvc2l0b3JpZXMgYXJlIHN0aWxsIHJlcXVpcmVkIHRvIGJlIHVzZWQgZHVyaW5nIHRoZSB1cGdyYWRlLCBleGVjdXRlIGxlYXBwIHdpdGggdGhlIC0tZW5hYmxlcmVwbyBvcHRpb24gd2l0aCB0aGUgcmVwb2lkIG9mIHRoZSByZXBvc2l0b3J5IHJlcXVpcmVkIHRvIGJlIGVuYWJsZWQgYXMgYW4gYXJndW1lbnQgKHRoZSBvcHRpb24gY2FuIGJlIHVzZWQgbXVsdGlwbGUgdGltZXMpLgpLZXk6IDFiOTEzMmNiMjM2MmFlNzgzMGU0OGVlZTc4MTFiZTk1Mjc3NDdkZTgKLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLS0tLQo=", "encoding": "base64", "source": "/var/log/leapp/leapp-report.txt"} TASK [infra.leapp.common : parse_leapp_report | Collect JSON report results] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:21 ok: [managed-node01] => {"changed": false, "content": 
"ewogICJsZWFwcF9ydW5faWQiOiAiYTkzZGU4NGUtOGU0Ny00ZmY2LThlZGMtMWQ0YTgzOTFmYjcwIiwgCiAgImVudHJpZXMiOiBbCiAgICB7CiAgICAgICJncm91cHMiOiBbCiAgICAgICAgImVycm9yIgogICAgICBdLCAKICAgICAgInRpdGxlIjogIlVuYWJsZSB0byBpZGVudGlmeSBwYWNrYWdlIHByb3ZpZGluZyB0aGUgYm9vdGVkIGtlcm5lbC4iLCAKICAgICAgInRpbWVTdGFtcCI6ICIyMDI1LTEyLTE5VDEwOjU4OjM2LjY3NjcxMFoiLCAKICAgICAgImhvc3RuYW1lIjogIm1hbmFnZWQtbm9kZTAxIiwgCiAgICAgICJhY3RvciI6ICJzY2FuX3NvdXJjZV9rZXJuZWwiLCAKICAgICAgInN1bW1hcnkiOiAiIiwgCiAgICAgICJhdWRpZW5jZSI6ICJzeXNhZG1pbiIsIAogICAgICAia2V5IjogIjQ2Zjc2ZjYxMjRiNTUzN2U1ZGZkMGU2YzI1MGQzZjQyYzhhOTc2OTEiLCAKICAgICAgImlkIjogImQ3MGQ3ODQ3NmFjYjYyNThlMjI5YzdmYTU3M2Q4YTA0MGEwNDQzNWE0NzRmNjI0ZmExYzdlNDdkYzk4ZmU2NmUiLCAKICAgICAgInNldmVyaXR5IjogImhpZ2giCiAgICB9LCAKICAgIHsKICAgICAgImdyb3VwcyI6IFsKICAgICAgICAiZXJyb3IiCiAgICAgIF0sIAogICAgICAidGl0bGUiOiAiQSBzdWJzY3JpcHRpb24tbWFuYWdlciBjb21tYW5kIGZhaWxlZCB0byBleGVjdXRlIiwgCiAgICAgICJ0aW1lU3RhbXAiOiAiMjAyNS0xMi0xOVQxMDo1ODozOC44OTMyNzZaIiwgCiAgICAgICJob3N0bmFtZSI6ICJtYW5hZ2VkLW5vZGUwMSIsIAogICAgICAiYWN0b3IiOiAic2Nhbl9zdWJzY3JpcHRpb25fbWFuYWdlcl9pbmZvIiwgCiAgICAgICJzdW1tYXJ5IjogIntcImxpbmtcIjogXCJodHRwczovL2FjY2Vzcy5yZWRoYXQuY29tL3NvbHV0aW9ucy82MTM4MzcyXCIsIFwiZGV0YWlsc1wiOiBcIkNvbW1hbmQgWydzdWJzY3JpcHRpb24tbWFuYWdlcicsICdyZWxlYXNlJ10gZmFpbGVkIHdpdGggZXhpdCBjb2RlIDEuXCIsIFwic3RkZXJyXCI6IFwiVGhpcyBzeXN0ZW0gaXMgbm90IHlldCByZWdpc3RlcmVkLiBUcnkgJ3N1YnNjcmlwdGlvbi1tYW5hZ2VyIHJlZ2lzdGVyIC0taGVscCcgZm9yIG1vcmUgaW5mb3JtYXRpb24uXFxuXCIsIFwiaGludFwiOiBcIlBsZWFzZSBlbnN1cmUgeW91IGhhdmUgYSB2YWxpZCBSSEVMIHN1YnNjcmlwdGlvbiBhbmQgeW91ciBuZXR3b3JrIGlzIHVwLiBJZiB5b3UgYXJlIHVzaW5nIHByb3h5IGZvciBSZWQgSGF0IHN1YnNjcmlwdGlvbi1tYW5hZ2VyLCBwbGVhc2UgbWFrZSBzdXJlIGl0IGlzIHNwZWNpZmllZCBpbnNpZGUgdGhlIC9ldGMvcmhzbS9yaHNtLmNvbmYgZmlsZS4gT3IgdXNlIHRoZSAtLW5vLXJoc20gb3B0aW9uIHdoZW4gcnVubmluZyBsZWFwcCwgaWYgeW91IGRvIG5vdCB3YW50IHRvIHVzZSBzdWJzY3JpcHRpb24tbWFuYWdlciBmb3IgdGhlIGluLXBsYWNlIHVwZ3JhZGUgYW5kIHlvdSB3YW50IHRvIGRlbGl2ZXIgYWxsIHRhcmdldCByZXBvc2l0b3JpZXMgYnkgeW91cnNlbGYgb3IgdXNpbmcgUkhVSSBvbiBwdWJsaWMgY2xvdWQuXCJ9IiwgCiAgICAgICJhdWRpZW5jZSI6ICJzeXNhZG1pbiIsIAogICAgICAia2V5IjogIjdlYzgyNjk3ODRkYjFiYmEyYWM1NGFlNDM4Njg5ZWYzOTdlMTY4MzMiLCAKICAgICAgImlkIjogIjJlM2VlN2E4ZDViOTBhZTMyNzVmODhhYTc1YTRmMGU0NTdmZjNhNmE1ZWJlZWM4ODU3MjZlYjYzOTgwMWFkOWIiLCAKICAgICAgInNldmVyaXR5IjogImhpZ2giCiAgICB9LCAKICAgIHsKICAgICAgImdyb3VwcyI6IFsKICAgICAgICAicmVwb3NpdG9yeSIsIAogICAgICAgICJmYWlsdXJlIgogICAgICBdLCAKICAgICAgInRpdGxlIjogIkV4Y2x1ZGVkIHRhcmdldCBzeXN0ZW0gcmVwb3NpdG9yaWVzIiwgCiAgICAgICJ0aW1lU3RhbXAiOiAiMjAyNS0xMi0xOVQxMDo1ODozOS4wNzc1NDhaIiwgCiAgICAgICJob3N0bmFtZSI6ICJtYW5hZ2VkLW5vZGUwMSIsIAogICAgICAiZGV0YWlsIjogewogICAgICAgICJyZW1lZGlhdGlvbnMiOiBbCiAgICAgICAgICB7CiAgICAgICAgICAgICJ0eXBlIjogImhpbnQiLCAKICAgICAgICAgICAgImNvbnRleHQiOiAiSWYgc29tZSBvZiBleGNsdWRlZCByZXBvc2l0b3JpZXMgYXJlIHN0aWxsIHJlcXVpcmVkIHRvIGJlIHVzZWQgZHVyaW5nIHRoZSB1cGdyYWRlLCBleGVjdXRlIGxlYXBwIHdpdGggdGhlIC0tZW5hYmxlcmVwbyBvcHRpb24gd2l0aCB0aGUgcmVwb2lkIG9mIHRoZSByZXBvc2l0b3J5IHJlcXVpcmVkIHRvIGJlIGVuYWJsZWQgYXMgYW4gYXJndW1lbnQgKHRoZSBvcHRpb24gY2FuIGJlIHVzZWQgbXVsdGlwbGUgdGltZXMpLiIKICAgICAgICAgIH0KICAgICAgICBdCiAgICAgIH0sIAogICAgICAiYWN0b3IiOiAicmVwb3NpdG9yaWVzX2JsYWNrbGlzdCIsIAogICAgICAic3VtbWFyeSI6ICJUaGUgZm9sbG93aW5nIHJlcG9zaXRvcmllcyBhcmUgbm90IHN1cHBvcnRlZCBieSBSZWQgSGF0IGFuZCBhcmUgZXhjbHVkZWQgZnJvbSB0aGUgbGlzdCBvZiByZXBvc2l0b3JpZXMgdXNlZCBkdXJpbmcgdGhlIHVwZ3JhZGUuXG4tIGNvZGVyZWFkeS1idWlsZGVyLWJldGEtZm9yLXJoZWwtOC1zMzkweC1ycG1zXG4tIGNvZGVyZWFkeS1idWlsZGVyLWJldGEtZm9yLXJoZWwtOC1wcGM2NGxlLXJwbXNcbi0gcmh1aS1jb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXg4Nl82NC1yaHVpLXJwbXNcbi
0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1hYXJjaDY0LWV1cy1ycG1zXG4tIGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgtcHBjNjRsZS1ldXMtcnBtc1xuLSBjb2RlcmVhZHktYnVpbGRlci1iZXRhLWZvci1yaGVsLTgteDg2XzY0LXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1hYXJjaDY0LXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1zMzkweC1ycG1zXG4tIGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgtczM5MHgtZXVzLXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtZXVzLXJwbXNcbi0gcmh1aS1jb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LWFhcmNoNjQtcmh1aS1ycG1zXG4tIGNvZGVyZWFkeS1idWlsZGVyLWJldGEtZm9yLXJoZWwtOC1hYXJjaDY0LXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC1yaHVpLXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtcmh1aS1ycG1zXG4tIGNvZGVyZWFkeS1idWlsZGVyLWZvci1yaGVsLTgteDg2XzY0LXJwbXNcbi0gY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtZXVzLXJodWktcnBtc1xuLSBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXBwYzY0bGUtcnBtcyIsIAogICAgICAiYXVkaWVuY2UiOiAic3lzYWRtaW4iLCAKICAgICAgImtleSI6ICIxYjkxMzJjYjIzNjJhZTc4MzBlNDhlZWU3ODExYmU5NTI3NzQ3ZGU4IiwgCiAgICAgICJpZCI6ICJmOGQxN2U2Zjc1ZDUxZGY5Njg4MDYwZDhhNDRiMmJjNjVlN2FiNGEzNmZmMjM5YjlmOGZjZmVkNjE1NzU0N2RhIiwgCiAgICAgICJzZXZlcml0eSI6ICJpbmZvIgogICAgfSwgCiAgICB7CiAgICAgICJncm91cHMiOiBbCiAgICAgICAgInJlcG9zaXRvcnkiCiAgICAgIF0sIAogICAgICAidGl0bGUiOiAiUGFja2FnZXMgYXZhaWxhYmxlIGluIGV4Y2x1ZGVkIHJlcG9zaXRvcmllcyB3aWxsIG5vdCBiZSBpbnN0YWxsZWQiLCAKICAgICAgInRpbWVTdGFtcCI6ICIyMDI1LTEyLTE5VDEwOjU4OjQxLjUyNzY0NVoiLCAKICAgICAgImhvc3RuYW1lIjogIm1hbmFnZWQtbm9kZTAxIiwgCiAgICAgICJkZXRhaWwiOiB7CiAgICAgICAgInJlbGF0ZWRfcmVzb3VyY2VzIjogWwogICAgICAgICAgewogICAgICAgICAgICAic2NoZW1lIjogInBhY2thZ2UiLCAKICAgICAgICAgICAgInRpdGxlIjogInB5dGhvbjMtcHl4YXR0ciIKICAgICAgICAgIH0sIAogICAgICAgICAgewogICAgICAgICAgICAic2NoZW1lIjogInBhY2thZ2UiLCAKICAgICAgICAgICAgInRpdGxlIjogInJwY2dlbiIKICAgICAgICAgIH0KICAgICAgICBdCiAgICAgIH0sIAogICAgICAiYWN0b3IiOiAicGVzX2V2ZW50c19zY2FubmVyIiwgCiAgICAgICJzdW1tYXJ5IjogIjIgcGFja2FnZXMgd2lsbCBiZSBza2lwcGVkIGJlY2F1c2UgdGhleSBhcmUgYXZhaWxhYmxlIG9ubHkgaW4gdGFyZ2V0IHN5c3RlbSByZXBvc2l0b3JpZXMgdGhhdCBhcmUgaW50ZW50aW9uYWxseSBleGNsdWRlZCBmcm9tIHRoZSBsaXN0IG9mIHJlcG9zaXRvcmllcyB1c2VkIGR1cmluZyB0aGUgdXBncmFkZS4gU2VlIHRoZSByZXBvcnQgbWVzc2FnZSB0aXRsZWQgXCJFeGNsdWRlZCB0YXJnZXQgc3lzdGVtIHJlcG9zaXRvcmllc1wiIGZvciBkZXRhaWxzLlxuVGhlIGxpc3Qgb2YgdGhlc2UgcGFja2FnZXM6XG4tIHB5dGhvbjMtcHl4YXR0ciAocmVwb2lkOiBjb2RlcmVhZHktYnVpbGRlci1mb3ItcmhlbC04LXg4Nl82NC1ycG1zKVxuLSBycGNnZW4gKHJlcG9pZDogY29kZXJlYWR5LWJ1aWxkZXItZm9yLXJoZWwtOC14ODZfNjQtcnBtcykiLCAKICAgICAgImF1ZGllbmNlIjogInN5c2FkbWluIiwgCiAgICAgICJrZXkiOiAiMjQzN2UyMDQ4MDhmOTg3NDc3YzBlOWJlOGU0Yzk1YjNhODdhOWYzZSIsIAogICAgICAiaWQiOiAiYmM0OTZlZGRmYWZkZjc2YWY2ZDUwOTUzMzg0YzAxZWY0MDc0ODFiNzJjOTI3MDRjYWZlMWMzNTg0MDcyMWJiMyIsIAogICAgICAic2V2ZXJpdHkiOiAiaGlnaCIKICAgIH0KICBdCn0K", "encoding": "base64", "source": "/var/log/leapp/leapp-report.json"} TASK [infra.leapp.common : parse_leapp_report | Parse report results] ********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:26 ok: [managed-node01] => {"ansible_facts": {"leapp_report_json": {"entries": [{"actor": "scan_source_kernel", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "d70d78476acb6258e229c7fa573d8a040a04435a474f624fa1c7e47dc98fe66e", "key": "46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "severity": "high", "summary": "", "timeStamp": "2025-12-19T10:58:36.676710Z", "title": "Unable to identify package providing the booted kernel."}, {"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": 
"managed-node01", "id": "2e3ee7a8d5b90ae3275f88aa75a4f0e457ff3a6a5ebeec885726eb639801ad9b", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "timeStamp": "2025-12-19T10:58:38.893276Z", "title": "A subscription-manager command failed to execute"}, {"actor": "repositories_blacklist", "audience": "sysadmin", "detail": {"remediations": [{"context": "If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).", "type": "hint"}]}, "groups": ["repository", "failure"], "hostname": "managed-node01", "id": "f8d17e6f75d51df9688060d8a44b2bc65e7ab4a36ff239b9f8fcfed6157547da", "key": "1b9132cb2362ae7830e48eee7811be9527747de8", "severity": "info", "summary": "The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.\n- codeready-builder-beta-for-rhel-8-s390x-rpms\n- codeready-builder-beta-for-rhel-8-ppc64le-rpms\n- rhui-codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-aarch64-eus-rpms\n- codeready-builder-for-rhel-8-ppc64le-eus-rpms\n- codeready-builder-beta-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-s390x-rpms\n- codeready-builder-for-rhel-8-s390x-eus-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rpms\n- rhui-codeready-builder-for-rhel-8-aarch64-rhui-rpms\n- codeready-builder-beta-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rhui-rpms\n- codeready-builder-for-rhel-8-ppc64le-rpms", "timeStamp": "2025-12-19T10:58:39.077548Z", "title": "Excluded target system repositories"}, {"actor": "pes_events_scanner", "audience": "sysadmin", "detail": {"related_resources": [{"scheme": "package", "title": "python3-pyxattr"}, {"scheme": "package", "title": "rpcgen"}]}, "groups": ["repository"], "hostname": "managed-node01", "id": "bc496eddfafdf76af6d50953384c01ef407481b72c92704cafe1c35840721bb3", "key": "2437e204808f987477c0e9be8e4c95b3a87a9f3e", "severity": "high", "summary": "2 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. 
See the report message titled \"Excluded target system repositories\" for details.\nThe list of these packages:\n- python3-pyxattr (repoid: codeready-builder-for-rhel-8-x86_64-rpms)\n- rpcgen (repoid: codeready-builder-for-rhel-8-x86_64-rpms)", "timeStamp": "2025-12-19T10:58:41.527645Z", "title": "Packages available in excluded repositories will not be installed"}], "leapp_run_id": "a93de84e-8e47-4ff6-8edc-1d4a8391fb70"}, "leapp_report_txt": ["Risk Factor: high (error)", "Title: Unable to identify package providing the booted kernel.", "Summary: ", "Key: 46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "----------------------------------------", "Risk Factor: high (error)", "Title: A subscription-manager command failed to execute", "Summary: {\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "Key: 7ec8269784db1bba2ac54ae438689ef397e16833", "----------------------------------------", "Risk Factor: high ", "Title: Packages available in excluded repositories will not be installed", "Summary: 2 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. 
See the report message titled \"Excluded target system repositories\" for details.", "The list of these packages:", "- python3-pyxattr (repoid: codeready-builder-for-rhel-8-x86_64-rpms)", "- rpcgen (repoid: codeready-builder-for-rhel-8-x86_64-rpms)", "Key: 2437e204808f987477c0e9be8e4c95b3a87a9f3e", "----------------------------------------", "Risk Factor: info ", "Title: Excluded target system repositories", "Summary: The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.", "- codeready-builder-beta-for-rhel-8-s390x-rpms", "- codeready-builder-beta-for-rhel-8-ppc64le-rpms", "- rhui-codeready-builder-for-rhel-8-x86_64-rhui-rpms", "- codeready-builder-for-rhel-8-aarch64-eus-rpms", "- codeready-builder-for-rhel-8-ppc64le-eus-rpms", "- codeready-builder-beta-for-rhel-8-x86_64-rpms", "- codeready-builder-for-rhel-8-aarch64-rpms", "- codeready-builder-for-rhel-8-s390x-rpms", "- codeready-builder-for-rhel-8-s390x-eus-rpms", "- codeready-builder-for-rhel-8-x86_64-eus-rpms", "- rhui-codeready-builder-for-rhel-8-aarch64-rhui-rpms", "- codeready-builder-beta-for-rhel-8-aarch64-rpms", "- codeready-builder-for-rhel-8-rhui-rpms", "- codeready-builder-for-rhel-8-x86_64-rhui-rpms", "- codeready-builder-for-rhel-8-x86_64-rpms", "- codeready-builder-for-rhel-8-x86_64-eus-rhui-rpms", "- codeready-builder-for-rhel-8-ppc64le-rpms", "Remediation: [hint] If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).", "Key: 1b9132cb2362ae7830e48eee7811be9527747de8", "----------------------------------------", ""]}, "changed": false} TASK [infra.leapp.common : parse_leapp_report | Check for inhibitors] ********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:32 ok: [managed-node01] => (item={'groups': ['error'], 'title': 'Unable to identify package providing the booted kernel.', 'timeStamp': '2025-12-19T10:58:36.676710Z', 'hostname': 'managed-node01', 'actor': 'scan_source_kernel', 'summary': '', 'audience': 'sysadmin', 'key': '46f76f6124b5537e5dfd0e6c250d3f42c8a97691', 'id': 'd70d78476acb6258e229c7fa573d8a040a04435a474f624fa1c7e47dc98fe66e', 'severity': 'high'}) => {"ansible_facts": {"leapp_inhibitors": [{"actor": "scan_source_kernel", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "d70d78476acb6258e229c7fa573d8a040a04435a474f624fa1c7e47dc98fe66e", "key": "46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "severity": "high", "summary": "", "timeStamp": "2025-12-19T10:58:36.676710Z", "title": "Unable to identify package providing the booted kernel."}], "upgrade_inhibited": true}, "ansible_loop_var": "item", "changed": false, "item": {"actor": "scan_source_kernel", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "d70d78476acb6258e229c7fa573d8a040a04435a474f624fa1c7e47dc98fe66e", "key": "46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "severity": "high", "summary": "", "timeStamp": "2025-12-19T10:58:36.676710Z", "title": "Unable to identify package providing the booted kernel."}} ok: [managed-node01] => (item={'groups': ['error'], 'title': 'A subscription-manager command failed to execute', 'timeStamp': '2025-12-19T10:58:38.893276Z', 'hostname': 'managed-node01', 'actor': 'scan_subscription_manager_info', 'summary': '{"link": 
"https://access.redhat.com/solutions/6138372", "details": "Command [\'subscription-manager\', \'release\'] failed with exit code 1.", "stderr": "This system is not yet registered. Try \'subscription-manager register --help\' for more information.\\n", "hint": "Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud."}', 'audience': 'sysadmin', 'key': '7ec8269784db1bba2ac54ae438689ef397e16833', 'id': '2e3ee7a8d5b90ae3275f88aa75a4f0e457ff3a6a5ebeec885726eb639801ad9b', 'severity': 'high'}) => {"ansible_facts": {"leapp_inhibitors": [{"actor": "scan_source_kernel", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "d70d78476acb6258e229c7fa573d8a040a04435a474f624fa1c7e47dc98fe66e", "key": "46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "severity": "high", "summary": "", "timeStamp": "2025-12-19T10:58:36.676710Z", "title": "Unable to identify package providing the booted kernel."}, {"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "2e3ee7a8d5b90ae3275f88aa75a4f0e457ff3a6a5ebeec885726eb639801ad9b", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "timeStamp": "2025-12-19T10:58:38.893276Z", "title": "A subscription-manager command failed to execute"}], "upgrade_inhibited": true}, "ansible_loop_var": "item", "changed": false, "item": {"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "2e3ee7a8d5b90ae3275f88aa75a4f0e457ff3a6a5ebeec885726eb639801ad9b", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. 
Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "timeStamp": "2025-12-19T10:58:38.893276Z", "title": "A subscription-manager command failed to execute"}} skipping: [managed-node01] => (item={'groups': ['repository', 'failure'], 'title': 'Excluded target system repositories', 'timeStamp': '2025-12-19T10:58:39.077548Z', 'hostname': 'managed-node01', 'detail': {'remediations': [{'type': 'hint', 'context': 'If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).'}]}, 'actor': 'repositories_blacklist', 'summary': 'The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.\n- codeready-builder-beta-for-rhel-8-s390x-rpms\n- codeready-builder-beta-for-rhel-8-ppc64le-rpms\n- rhui-codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-aarch64-eus-rpms\n- codeready-builder-for-rhel-8-ppc64le-eus-rpms\n- codeready-builder-beta-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-s390x-rpms\n- codeready-builder-for-rhel-8-s390x-eus-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rpms\n- rhui-codeready-builder-for-rhel-8-aarch64-rhui-rpms\n- codeready-builder-beta-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rhui-rpms\n- codeready-builder-for-rhel-8-ppc64le-rpms', 'audience': 'sysadmin', 'key': '1b9132cb2362ae7830e48eee7811be9527747de8', 'id': 'f8d17e6f75d51df9688060d8a44b2bc65e7ab4a36ff239b9f8fcfed6157547da', 'severity': 'info'}) => {"ansible_loop_var": "item", "changed": false, "item": {"actor": "repositories_blacklist", "audience": "sysadmin", "detail": {"remediations": [{"context": "If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).", "type": "hint"}]}, "groups": ["repository", "failure"], "hostname": "managed-node01", "id": "f8d17e6f75d51df9688060d8a44b2bc65e7ab4a36ff239b9f8fcfed6157547da", "key": "1b9132cb2362ae7830e48eee7811be9527747de8", "severity": "info", "summary": "The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.\n- codeready-builder-beta-for-rhel-8-s390x-rpms\n- codeready-builder-beta-for-rhel-8-ppc64le-rpms\n- rhui-codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-aarch64-eus-rpms\n- codeready-builder-for-rhel-8-ppc64le-eus-rpms\n- codeready-builder-beta-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-s390x-rpms\n- codeready-builder-for-rhel-8-s390x-eus-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rpms\n- rhui-codeready-builder-for-rhel-8-aarch64-rhui-rpms\n- codeready-builder-beta-for-rhel-8-aarch64-rpms\n- codeready-builder-for-rhel-8-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rhui-rpms\n- codeready-builder-for-rhel-8-x86_64-rpms\n- codeready-builder-for-rhel-8-x86_64-eus-rhui-rpms\n- 
codeready-builder-for-rhel-8-ppc64le-rpms", "timeStamp": "2025-12-19T10:58:39.077548Z", "title": "Excluded target system repositories"}, "skip_reason": "Conditional result was False"} skipping: [managed-node01] => (item={'groups': ['repository'], 'title': 'Packages available in excluded repositories will not be installed', 'timeStamp': '2025-12-19T10:58:41.527645Z', 'hostname': 'managed-node01', 'detail': {'related_resources': [{'scheme': 'package', 'title': 'python3-pyxattr'}, {'scheme': 'package', 'title': 'rpcgen'}]}, 'actor': 'pes_events_scanner', 'summary': '2 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. See the report message titled "Excluded target system repositories" for details.\nThe list of these packages:\n- python3-pyxattr (repoid: codeready-builder-for-rhel-8-x86_64-rpms)\n- rpcgen (repoid: codeready-builder-for-rhel-8-x86_64-rpms)', 'audience': 'sysadmin', 'key': '2437e204808f987477c0e9be8e4c95b3a87a9f3e', 'id': 'bc496eddfafdf76af6d50953384c01ef407481b72c92704cafe1c35840721bb3', 'severity': 'high'}) => {"ansible_loop_var": "item", "changed": false, "item": {"actor": "pes_events_scanner", "audience": "sysadmin", "detail": {"related_resources": [{"scheme": "package", "title": "python3-pyxattr"}, {"scheme": "package", "title": "rpcgen"}]}, "groups": ["repository"], "hostname": "managed-node01", "id": "bc496eddfafdf76af6d50953384c01ef407481b72c92704cafe1c35840721bb3", "key": "2437e204808f987477c0e9be8e4c95b3a87a9f3e", "severity": "high", "summary": "2 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. 
See the report message titled \"Excluded target system repositories\" for details.\nThe list of these packages:\n- python3-pyxattr (repoid: codeready-builder-for-rhel-8-x86_64-rpms)\n- rpcgen (repoid: codeready-builder-for-rhel-8-x86_64-rpms)", "timeStamp": "2025-12-19T10:58:41.527645Z", "title": "Packages available in excluded repositories will not be installed"}, "skip_reason": "Conditional result was False"} TASK [infra.leapp.common : parse_leapp_report | Collect inhibitors] ************ task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:44 ok: [managed-node01] => {"changed": false, "cmd": ["awk", "/\\(inhibitor\\)/,/^-------/", "/var/log/leapp/leapp-report.txt"], "delta": "0:00:00.003674", "end": "2025-12-19 05:59:06.538524", "failed_when_result": false, "msg": "", "rc": 0, "start": "2025-12-19 05:59:06.534850", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []} TASK [infra.leapp.common : parse_leapp_report | Collect high errors] *********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:53 ok: [managed-node01] => {"changed": false, "cmd": ["awk", "/high \\(error\\)/,/^-------/", "/var/log/leapp/leapp-report.txt"], "delta": "0:00:00.003474", "end": "2025-12-19 05:59:06.795286", "failed_when_result": false, "msg": "", "rc": 0, "start": "2025-12-19 05:59:06.791812", "stderr": "", "stderr_lines": [], "stdout": "Risk Factor: high (error)\nTitle: Unable to identify package providing the booted kernel.\nSummary: \nKey: 46f76f6124b5537e5dfd0e6c250d3f42c8a97691\n----------------------------------------\nRisk Factor: high (error)\nTitle: A subscription-manager command failed to execute\nSummary: {\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}\nKey: 7ec8269784db1bba2ac54ae438689ef397e16833\n----------------------------------------", "stdout_lines": ["Risk Factor: high (error)", "Title: Unable to identify package providing the booted kernel.", "Summary: ", "Key: 46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "----------------------------------------", "Risk Factor: high (error)", "Title: A subscription-manager command failed to execute", "Summary: {\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. 
Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "Key: 7ec8269784db1bba2ac54ae438689ef397e16833", "----------------------------------------"]} TASK [infra.leapp.upgrade : leapp-upgrade | Display inhibitors] **************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:103 skipping: [managed-node01] => {} TASK [infra.leapp.upgrade : leapp-upgrade | Display errors] ******************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:108 ok: [managed-node01] => { "results_errors.stdout_lines": [ "Risk Factor: high (error)", "Title: Unable to identify package providing the booted kernel.", "Summary: ", "Key: 46f76f6124b5537e5dfd0e6c250d3f42c8a97691", "----------------------------------------", "Risk Factor: high (error)", "Title: A subscription-manager command failed to execute", "Summary: {\"link\": \"https://access.redhat.com/solutions/6138372\", \"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\"}", "Key: 7ec8269784db1bba2ac54ae438689ef397e16833", "----------------------------------------" ] } TASK [infra.leapp.upgrade : leapp-upgrade | Fail Leapp upgrade] **************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tasks/leapp-upgrade.yml:113 fatal: [managed-node01]: FAILED! => {"changed": false, "msg": "Errors encountered running Leapp upgrade command. Review the tasks above or the result file at /var/log/leapp/leapp-report.txt."} TASK [Test | Check error] ****************************************************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:14 ok: [managed-node01] => { "msg": "errors {\n \"_ansible_no_log\": false,\n \"changed\": false,\n \"failed\": true,\n \"msg\": \"Errors encountered running Leapp upgrade command. 
Review the tasks above or the result file at /var/log/leapp/leapp-report.txt.\"\n}" } TASK [Test | Ensure correct error] ********************************************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:18 ok: [managed-node01] => { "changed": false, "msg": "All assertions passed" } TASK [Cleanup | Remove log files] ********************************************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/upgrade/tests/tests_default.yml:27 changed: [managed-node01] => {"changed": true, "cmd": "set -euxo pipefail\nrm -f /var/log/leapp/leapp-upgrade.log\nrm -f /var/log/ripu/ripu.log*\n", "delta": "0:00:00.005452", "end": "2025-12-19 05:59:07.225033", "msg": "", "rc": 0, "start": "2025-12-19 05:59:07.219581", "stderr": "+ rm -f /var/log/leapp/leapp-upgrade.log\n+ rm -f /var/log/ripu/ripu.log", "stderr_lines": ["+ rm -f /var/log/leapp/leapp-upgrade.log", "+ rm -f /var/log/ripu/ripu.log"], "stdout": "", "stdout_lines": []} RUNNING HANDLER [infra.leapp.common : Check for log file] ********************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:3 ok: [managed-node01] => {"changed": false, "stat": {"exists": false}} RUNNING HANDLER [infra.leapp.common : Add end time to log file] **************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:9 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} RUNNING HANDLER [infra.leapp.common : Slurp ripu.log file] ********************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:19 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} RUNNING HANDLER [infra.leapp.common : Decode ripu.log file] ******************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:26 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} RUNNING HANDLER [infra.leapp.common : Rename log file] ************************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/handlers/main.yml:32 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} PLAY RECAP ********************************************************************* managed-node01 : ok=28 changed=5 unreachable=0 failed=0 skipped=16 rescued=2 ignored=0
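
Note on the failure above: the two "high (error)" findings are expected in this test environment, because the managed node is not registered with subscription-manager. The role's "Fail Leapp upgrade" task therefore fires, the test asserts on that exact error ("All assertions passed"), and the recap still ends with failed=0 since the failure is rescued (rescued=2). Below is a minimal manual-remediation sketch, not part of the role, assuming direct shell access to the managed node and using only the commands, options, and paths that appear in the report output above (the awk filters, subscription-manager register, leapp's --no-rhsm, and --enablerepo); the repoid shown is the one the report lists as excluded and is only relevant if python3-pyxattr or rpcgen must be carried over.

  # Re-check the blocking findings straight from the report, the same way the role's awk tasks do
  awk '/\(inhibitor\)/,/^-------/' /var/log/leapp/leapp-report.txt
  awk '/high \(error\)/,/^-------/' /var/log/leapp/leapp-report.txt

  # Either register the system so leapp can resolve target repositories via subscription-manager ...
  subscription-manager register

  # ... or, per the report hint, run leapp with --no-rhsm and deliver the target
  # repositories yourself or via RHUI on public cloud
  leapp upgrade --no-rhsm

  # If python3-pyxattr or rpcgen from the excluded repository are still required,
  # re-enable that repoid as the remediation hint suggests (option may be repeated)
  leapp upgrade --enablerepo codeready-builder-for-rhel-8-x86_64-rpms

Once the errors are resolved, re-running the role repeats the same flow seen in this log: it recreates /var/log/ripu/ripu.log, re-parses /var/log/leapp/leapp-report.txt, and only proceeds past the "Fail Leapp upgrade" task when no inhibitors or high errors remain.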