[develop] Fetch missing packages, add stderr handling #3132

Merged 7 commits on May 9, 2022

@@ -12,9 +12,9 @@ class Dnf(Command):
def __init__(self, retries: int):
super().__init__('dnf', retries)

def update(self, enablerepo: str,
def update(self, enablerepo: str = None,
package: str = None,
disablerepo: str = '*',
disablerepo: str = None,
assume_yes: bool = True):
"""
Interface for `dnf update`
@@ -32,10 +32,21 @@ def update(self, enablerepo: str,
if package is not None:
update_parameters.append(package)

update_parameters.append(f'--disablerepo={disablerepo}')
update_parameters.append(f'--enablerepo={enablerepo}')
if disablerepo is not None:
update_parameters.append(f'--disablerepo={disablerepo}')

if enablerepo is not None:
update_parameters.append(f'--enablerepo={enablerepo}')

proc = self.run(update_parameters)

if 'error' in proc.stdout:
raise CriticalError(
f'Found an error. dnf update failed for package `{package}`, reason: `{proc.stdout}`')
if proc.stderr:
raise CriticalError(
f'dnf update failed for packages `{package}`, reason: `{proc.stderr}`')

self.run(update_parameters)

def install(self, package: str,
assume_yes: bool = True):
@@ -52,6 +63,13 @@ def install(self, package: str,
if not 'does not update' in proc.stdout: # trying to reinstall package with url
raise CriticalError(f'dnf install failed for `{package}`, reason `{proc.stdout}`')

if 'error' in proc.stdout:
raise CriticalError(
f'Found an error. dnf install failed for package `{package}`, reason: `{proc.stdout}`')
if proc.stderr:
raise CriticalError(
f'dnf install failed for package `{package}`, reason: `{proc.stderr}`')

def remove(self, package: str,
assume_yes: bool = True):
"""
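
The core change to `Dnf.update()` is that both repo switches become optional and are appended only when explicitly set, while the command's output is now checked on both streams. Below is a minimal, standalone sketch of how the argument list is composed under the new defaults; it is an illustration only (the `assume_yes`/`-y` handling and the project's `Command.run()` wrapper are omitted), not the repository's actual implementation.

```python
from typing import List, Optional


def build_update_args(package: Optional[str] = None,
                      enablerepo: Optional[str] = None,
                      disablerepo: Optional[str] = None) -> List[str]:
    """Compose `dnf update` parameters, adding repo switches only when requested."""
    args: List[str] = ['update']
    if package is not None:
        args.append(package)
    # Previously '--disablerepo=*' and '--enablerepo=...' were always appended;
    # after this change they are omitted unless a value is passed.
    if disablerepo is not None:
        args.append(f'--disablerepo={disablerepo}')
    if enablerepo is not None:
        args.append(f'--enablerepo={enablerepo}')
    return args


print(build_update_args(package='libmodulemd'))
# ['update', 'libmodulemd']
print(build_update_args(disablerepo='*', enablerepo='rhui-microsoft-azure-rhel*'))
# ['update', '--disablerepo=*', '--enablerepo=rhui-microsoft-azure-rhel*']
```
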
@@ -2,6 +2,7 @@
from typing import List

from src.command.command import Command
from src.error import CriticalError


class DnfDownload(Command):
@@ -21,7 +22,8 @@ def download_packages(self, packages: List[str],

args.append(f'--archlist={",".join(archlist)}')
args.append(f'--destdir={str(destdir)}')
args.append('--disableplugin=subscription-manager') # to speed up download
# to speed up download
args.append('--disableplugin=subscription-manager')

if exclude:
args.append(f'--exclude={exclude}')
@@ -32,4 +34,10 @@ def download_packages(self, packages: List[str],
args.append('-y')
args.extend(packages)

self.run(args)
process = self.run(args)
if 'error' in process.stdout:
raise CriticalError(
f'Found an error. dnf download failed for packages `{packages}`, reason: `{process.stdout}`')
if process.stderr:
raise CriticalError(
f'dnf download failed for packages `{packages}`, reason: `{process.stderr}`')
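
`download_packages` now applies the same two checks as `update` and `install`: an `error` marker on stdout and any stderr output both raise `CriticalError`. A standalone sketch of that shared pattern, with an illustrative stand-in for `src.error.CriticalError`:

```python
import subprocess


class CriticalError(Exception):
    """Illustrative stand-in for the project's src.error.CriticalError."""


def check_dnf_output(action: str, target: str, proc: subprocess.CompletedProcess) -> None:
    """Raise if a dnf invocation reported problems on either output stream."""
    if 'error' in proc.stdout:
        raise CriticalError(f'{action} failed for `{target}`, reason: `{proc.stdout}`')
    if proc.stderr:
        raise CriticalError(f'{action} failed for `{target}`, reason: `{proc.stderr}`')


# Example invocation (requires dnf and its download plugin on the host):
proc = subprocess.run(['dnf', 'download', '--destdir=/tmp/packages', '-y', 'tar'],
                      capture_output=True, text=True, check=False)
check_dnf_output('dnf download', 'tar', proc)
```
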
@@ -1,3 +1,4 @@
import re
from typing import Callable, List

from src.command.command import Command
@@ -12,23 +13,25 @@ class DnfRepoquery(Command):
def __init__(self, retries: int):
super().__init__('dnf', retries) # repoquery would require yum-utils package

def __query(self, packages: List[str],
def __query(self, defined_packages: List[str],
queryformat: str,
archlist: List[str],
requires: bool,
resolve: bool,
output_handler: Callable,
only_newest: bool = True) -> List[str]:
only_newest: bool = True,
dependencies: bool = False) -> List[str]:
"""
Run generic query using `dnf repoquery` command.

:param packages: data will be returned for those `packages`
:param defined_packages: data will be returned for those `defined_packages`
:param queryformat: specify custom query output format
:param archlist: limit results to these architectures
:param requires: get capabilities that the packages depend on
:param requires: get capabilities that the defined_packages depend on
:param resolve: resolve capabilities to originating package(s)
:param output_handler: different queries produce different outputs, use specific output handler
:param only_newest: if there are more than one candidate packages, download only the newest one
:param dependencies: True when the query only gathers dependencies (the missing-package check is skipped)
:raises:
:class:`CriticalError`: can be raised on exceeding retries or when error occurred
:class:`PackageNotfound`: when query did not return any package info
@@ -38,31 +41,40 @@ def __query(self, packages: List[str],

args.append('repoquery')
args.append(f'--archlist={",".join(archlist)}')
args.append('--disableplugin=subscription-manager') # to speed up querying
# to speed up querying
args.append('--disableplugin=subscription-manager')
if only_newest:
args.append('--latest-limit=1')
args.append(f'--queryformat={queryformat}')
args.append('--quiet')

if requires:
args.append('--requires')

if resolve:
args.append('--resolve')

args.append('-y') # to import GPG keys

args.extend(packages)
args.extend(defined_packages)

# dnf repoquery doesn't set error code on empty results
output = self.run(args).stdout
output_handler(output)
process = self.run(args)

output_handler(process.stdout, process.stderr)

packages: List[str] = []
for line in output.split('\n'):
for line in process.stdout.split('\n'):
if line:
packages.append(line)

if not dependencies:
missing_packages: List[str] = []
for package in defined_packages:
r = re.compile(f'.*{package}')
match = list(filter(r.match, packages))
if not match:
missing_packages.append(package)
if missing_packages:
raise PackageNotfound(
f'repoquery failed. Cannot find packages: {missing_packages}')
return packages

def query(self, packages: List[str], queryformat: str, archlist: List[str], only_newest: bool = True) -> List[str]:
@@ -79,14 +91,23 @@ def query(self, packages: List[str], queryformat: str, archlist: List[str], only
:returns: query result
"""

def output_handler(output: str):
def output_handler(output_stdout: str, output_stderr: str):
""" In addition to errors, handle missing packages """
if not output:
raise PackageNotfound(f'repoquery failed for packages `{packages}`, reason: some of package(s) not found')
elif 'error' in output:
raise CriticalError(f'repoquery failed for packages `{packages}`, reason: `{output}`')

return self.__query(packages, queryformat, archlist, False, False, output_handler, only_newest)
if not output_stdout:
raise PackageNotfound(
f'repoquery failed for packages `{packages}`, reason: some of package(s) not found')
if 'error' in output_stdout:
raise CriticalError(
f'Found an error. repoquery failed for packages `{packages}`, reason: `{output_stdout}`')
if "Last metadata expiration check" in output_stderr:
pass # https://dnf.readthedocs.io/en/latest/conf_ref.html#metadata-expire-label
elif "No matches found for the following disable plugin patterns: subscription-manager" in output_stderr:
pass # no subscription-manager on AlmaLinux
else:
raise CriticalError(
f'repoquery failed for packages `{packages}`, reason: `{output_stderr}`')

return self.__query(packages, queryformat, archlist, False, False, output_handler, only_newest, False)

def get_dependencies(self, packages: List[str], queryformat: str, archlist: List[str], only_newest: bool = True) -> List[str]:
"""
@@ -99,15 +120,24 @@ def get_dependencies(self, packages: List[str], queryformat: str, archlist: List
:raises:
:class:`CriticalError`: can be raised on exceeding retries or when error occurred
:class:`ValueError`: when `packages` list is empty
:returns: query result
:returns: list of dependencies for `packages`
"""
# repoquery without KEY argument will query dependencies for all packages
if not packages:
raise ValueError('packages: list cannot be empty')

def output_handler(output: str):
""" Handle errors """
if 'error' in output:
raise CriticalError(f'dnf repoquery failed for packages `{packages}`, reason: `{output}`')

return self.__query(packages, queryformat, archlist, True, True, output_handler, only_newest)
def output_handler(output_stdout: str, output_stderr: str):
""" In addition to errors, handle missing packages """
if not output_stdout:
raise PackageNotfound(
f'repoquery failed for packages `{packages}`, reason: some of package(s) not found')
if 'error' in output_stdout:
raise CriticalError(
f'Found an error. repoquery failed for packages `{packages}`, reason: `{output_stdout}`')
if "Last metadata expiration check" in output_stderr:
pass # https://dnf.readthedocs.io/en/latest/conf_ref.html#metadata-expire-label
elif output_stderr:
raise CriticalError(
f'repoquery failed for packages `{packages}`, reason: `{output_stderr}`')

return self.__query(packages, queryformat, archlist, True, True, output_handler, only_newest, True)
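
The other notable addition in `__query` is the regex-based check that every requested package actually shows up in the repoquery output (skipped when `dependencies` is True, presumably because the output then lists dependencies rather than the requested packages). A minimal standalone sketch of that check, with made-up package names and an illustrative stand-in for `PackageNotfound`:

```python
import re
from typing import List


class PackageNotfound(Exception):
    """Illustrative stand-in for the project's PackageNotfound error."""


def find_missing(defined_packages: List[str], query_lines: List[str]) -> List[str]:
    """Return requested packages with no matching line in the repoquery output."""
    missing: List[str] = []
    for package in defined_packages:
        pattern = re.compile(f'.*{package}')
        if not list(filter(pattern.match, query_lines)):
            missing.append(package)
    return missing


requested = ['vim-enhanced', 'no-such-package']
repoquery_stdout = ['vim-enhanced-8.0.1763-16.el8.x86_64']
missing = find_missing(requested, repoquery_stdout)
if missing:
    raise PackageNotfound(f'repoquery failed. Cannot find packages: {missing}')
```
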
@@ -49,6 +49,10 @@ def _create_backup_repositories(self):
logging.debug('Done.')

def _install_base_packages(self):

# Bug in RHEL 8.4 https://bugzilla.redhat.com/show_bug.cgi?id=2004853
self._tools.dnf.update(package='libmodulemd')

# some packages are from EPEL repo
# make sure that we reinstall it before proceeding
if self._tools.rpm.is_package_installed('epel-release'):
@@ -73,7 +77,7 @@ def _install_base_packages(self):
def _add_third_party_repositories(self):
# Fix for RHUI client certificate expiration [#2318]
if self._tools.dnf.is_repo_enabled('rhui-microsoft-azure-rhel'):
self._tools.dnf.update(enablerepo='rhui-microsoft-azure-rhel*')
self._tools.dnf.update(disablerepo='*', enablerepo='rhui-microsoft-azure-rhel*')

for repo in self._repositories:
repo_filepath = Path('/etc/yum.repos.d') / f'{repo}.repo'
@@ -103,6 +107,8 @@ def _add_third_party_repositories(self):
'2ndquadrant-dl-default-release-pg13-debug']:
self._tools.dnf_config_manager.disable_repo(repo)

self._tools.dnf.makecache(False, True)

def __remove_dnf_cache_for_custom_repos(self):
# clean metadata for upgrades (when the same package can be downloaded from changed repo)
repocaches: List[str] = list(self.__dnf_cache_path.iterdir())
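
One knock-on effect worth noting: because `disablerepo` no longer defaults to `'*'`, the RHUI repository refresh in `_add_third_party_repositories` must pass the wildcard explicitly to keep disabling every other repo. The two call sites from the diff, side by side:

```python
# before: '--disablerepo=*' was appended implicitly via the old default
self._tools.dnf.update(enablerepo='rhui-microsoft-azure-rhel*')

# after: the wildcard is spelled out to preserve the old "only this repo" behavior
self._tools.dnf.update(disablerepo='*', enablerepo='rhui-microsoft-azure-rhel*')
```
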
@@ -14,8 +14,6 @@ def test_interface_query(mocker):
'--disableplugin=subscription-manager',
'--latest-limit=1',
'--queryformat=some_format',
'--quiet',
'-y',
'tar',
'vim'
]
@@ -31,10 +29,8 @@ def test_interface_get_dependencies(mocker):
'--disableplugin=subscription-manager',
'--latest-limit=1',
'--queryformat=some_format',
'--quiet',
'--requires',
'--resolve',
'-y',
'tar',
'vim'
]
44 changes: 40 additions & 4 deletions docs/home/howto/CLUSTER.md
@@ -1039,15 +1039,15 @@ sudo chmod +x /home/download-requirements.py # make the requirements script exec

After this you should be able to run the ```download-requirements.py``` from the ```home``` folder.

### RedHat 7.x
### RedHat 8.x

For RedHat you can use the following command to launch a container:

```shell
docker run -v /shared_folder:/home <--platform linux/amd64 or --platform linux/arm64> --rm -it registry.access.redhat.com/ubi7/ubi:7.9
docker run -v /shared_folder:/home <--platform linux/amd64 or --platform linux/arm64> --rm -it registry.access.redhat.com/ubi8/ubi:8.4
```

As the ```registry.access.redhat.com/ubi7/ubi:7.9``` image is multi-arch you can include ```--platform linux/amd64``` or ```--platform linux/arm64``` to run the container as the specified architecture. The ```/shared_folder``` should be a folder on your local machine containing the requirement scripts.
As the ```registry.access.redhat.com/ubi8/ubi:8.4``` image is multi-arch you can include ```--platform linux/amd64``` or ```--platform linux/arm64``` to run the container as the specified architecture. The ```/shared_folder``` should be a folder on your local machine containing the requirement scripts.

To run the ```download-requirements.py``` script you will need a RedHat developer subscription to register the running container and to make sure you can access the official RedHat repos for the needed packages. More information on getting this free subscription is available [here](https://developers.redhat.com/articles/getting-red-hat-developer-subscription-what-rhel-users-need-know).

@@ -1059,7 +1059,43 @@ subscription-manager attach --auto # will enable the RedHat official repositorie
chmod +x /home/download-requirements.py # make the requirements script executable
```

After this you should be able to run the ```download-requirements.py``` from the ```home``` folder.
After this you should be able to run the ```download-requirements.py``` from the ```home``` folder:

```shell
/usr/libexec/platform-python /home/download-requirements.py /home/offline_requirements_rhel_8_x86_64 rhel-8
```

### AlmaLinux 8.x

For AlmaLinux, you can use the following command to launch a container:

```shell
docker run -v /shared_folder:/home --rm -it almalinux:8.4
```

The ```almalinux:8.4``` image is amd64-only. The ```/shared_folder``` should be a folder on your local machine containing the requirement scripts.

When you are inside the container, run the following command to prepare for running the ```download-requirements.py``` script:

```shell
chmod +x /home/download-requirements.py # make the requirements script executable
```

After this you should be able to run the ```download-requirements.py``` from the ```home``` folder:

```shell
/usr/libexec/platform-python /home/download-requirements.py /home/offline_requirements_almalinux_8_4_x86_64 almalinux-8
```

### Known issues

In some local environments (e.g. when using the AlmaLinux image) the following issue may appear:

```sh
Failed to set locale, defaulting to C.UTF-8
```

To fix the issue, verify or set your locales. Example: `export LC_ALL=C.UTF-8`

## How to use additional custom Terraform templates
