diff --git a/.github/workflows/pipeline.yml b/.github/workflows/pipeline.yml
index 34cc692..d55a3b1 100644
--- a/.github/workflows/pipeline.yml
+++ b/.github/workflows/pipeline.yml
@@ -71,10 +71,15 @@ jobs:
         run: |
           tox -e pylint
-      - name: Run ruff
+      - name: Run ruff-check
        if: matrix.toxenv== 'py312'
        run: |
-          tox -e ruff
+          tox -e ruff-check
+
+      - name: Run ruff-format-check
+        if: matrix.toxenv== 'py312'
+        run: |
+          tox -e ruff-format-check
 
   Test-Snap:
     runs-on: ubuntu-20.04
     steps:
diff --git a/README.md b/README.md
index e8a35ed..009a406 100644
--- a/README.md
+++ b/README.md
@@ -103,6 +103,10 @@ Running `tox -e py{36,37,38,39,310,311}` in project root directory will run all
 tox -e py310
 ```
 
+## Formatting
+
+This project uses `ruff` for formatting. To format the code, simply run `tox -e ruff-format`. Running `tox` with no arguments will automatically run the `ruff-format` environment as well.
+
 ## Versioning
 
 The project will use `..[]` as a versioning scheme.
diff --git a/hotkdump/core/exceptions.py b/hotkdump/core/exceptions.py
index f9fa395..cb079cc 100644
--- a/hotkdump/core/exceptions.py
+++ b/hotkdump/core/exceptions.py
@@ -3,18 +3,19 @@
 # Copyright 2023 Canonical Limited.
 # SPDX-License-Identifier: GPL-3.0
 
-"""`hotkdump` exception types.
-"""
+"""`hotkdump` exception types."""
 
 import logging
 
 
 class ExceptionWithLog(Exception):
     """Exception type with automatic logging."""
+
     def __init__(self, msg) -> None:
         logging.error("EXCEPTION: %s", msg)
         super().__init__(msg)
 
+
 class NotAKernelCrashDumpException(ExceptionWithLog):
     """Exception thrown when the given file is not recognized as a crash
     dump file."""
diff --git a/hotkdump/core/folder_retention_manager.py b/hotkdump/core/folder_retention_manager.py
index 66914e0..18f4da6 100644
--- a/hotkdump/core/folder_retention_manager.py
+++ b/hotkdump/core/folder_retention_manager.py
@@ -3,8 +3,7 @@
 # Copyright 2023 Canonical Limited.
 # SPDX-License-Identifier: GPL-3.0
 
-"""Folder retention manager and its' policies.
-"""
+"""Folder retention manager and its policies."""
 
 import abc
 import os
@@ -17,8 +16,7 @@
 
 
 class RetentionPolicyBase(abc.ABC):
-    """Base class for all retention policies.
-    """
+    """Base class for all retention policies."""
 
     @property
     @abc.abstractmethod
@@ -33,8 +31,13 @@ def remove_file(self, file_info):
         """Remove a file."""
         (path, stat) = file_info
         os.remove(path)
-        logging.debug("removed %s to reclaim %s. age: %2f seconds. reason: %s ",
-                      path, pretty_size(stat.st_size), (time.time() - stat.st_atime), self.name)
+        logging.debug(
+            "removed %s to reclaim %s. age: %2f seconds. reason: %s ",
+            path,
+            pretty_size(stat.st_size),
+            (time.time() - stat.st_atime),
+            self.name,
+        )
 
 
 class RPTotalFileCount(RetentionPolicyBase):
@@ -81,21 +84,21 @@ def name(self):
         return "total file size policy"
 
     def execute(self, file_infos: list) -> list:
-
         def total_size(): return sum(finfo[1].st_size for finfo in file_infos)
 
         if total_size() >= self.high_watermark_bytes:
             logging.debug(
                 "total ddeb size of %s exceeds the high watermark %s, starting cleanup.",
-                pretty_size(total_size()), pretty_size(self.high_watermark_bytes)
+                pretty_size(total_size()),
+                pretty_size(self.high_watermark_bytes),
             )
             # Remove files until total size is below the low watermark
             while len(file_infos) > 0:
                 if total_size() < self.low_wm_bytes:
                     logging.debug(
                         "total ddeb folder size is now below %s low watermark, stopping cleanup.",
-                        pretty_size(self.low_wm_bytes)
+                        pretty_size(self.low_wm_bytes),
                     )
                     break
                 ddeb_info = file_infos.pop()
 
@@ -153,8 +156,9 @@ def execute(self, file_infos: list) -> list:
 
 
 @dataclass
-class FolderRetentionManagerSettings():
+class FolderRetentionManagerSettings:
     """Settings for folder retention manager."""
+
     enabled: bool
     size_hwm: int
     size_lwm: int
@@ -170,12 +174,12 @@ def validate_sanity(self):
         if self._size_enabled:
             if self.size_hwm < self.size_lwm:
                 raise ExceptionWithLog(
-                    "ddeb high watermark cannot be less than low watermark!")
+                    "ddeb high watermark cannot be less than low watermark!"
+                )
 
 
-class FolderRetentionManager():
-    """Policy-based folder retention manager.
-    """
+class FolderRetentionManager:
+    """Policy-based folder retention manager."""
 
     def __init__(self, folder_paths, filter_function) -> None:
         self.folder_paths = folder_paths
@@ -225,7 +229,8 @@ def execute_policies(self):
             files = policy.execute(files)
         logging.debug(
             "postrun-aftermath: ddeb folder final size %s, %d cached ddebs remain.",
-            pretty_size(sum(finfo[1].st_size for finfo in files)), len(files)
+            pretty_size(sum(finfo[1].st_size for finfo in files)),
+            len(files),
         )
         return [x for x in self.files if x not in files]
 
diff --git a/hotkdump/core/hotkdump.py b/hotkdump/core/hotkdump.py
index 2769bc5..ff5b038 100644
--- a/hotkdump/core/hotkdump.py
+++ b/hotkdump/core/hotkdump.py
@@ -3,8 +3,7 @@
 # Copyright 2023 Canonical Limited.
 # SPDX-License-Identifier: GPL-3.0
 
-"""The main `hotkdump` class implementation.
-""" +"""The main `hotkdump` class implementation.""" import os import re @@ -27,32 +26,33 @@ try: from ubuntutools.pullpkg import PullPkg + # pylint: disable-next=import-private-name from ubuntutools.misc import _StderrProgressBar from ubuntutools import getLogger as ubuntutools_GetLogger except ModuleNotFoundError as exc: - raise ModuleNotFoundError("\n\n`hotkdump` needs ubuntu.pullpkg to function.\n" - "Install it via `sudo apt install ubuntu-dev-tools`") from exc + raise ModuleNotFoundError( + "\n\n`hotkdump` needs ubuntu.pullpkg to function.\n" + "Install it via `sudo apt install ubuntu-dev-tools`" + ) from exc from jinja2 import Template from hotkdump.core.exceptions import ExceptionWithLog from hotkdump.core.kdumpfile import KdumpFile from hotkdump.core.utils import pretty_size -from hotkdump.core.folder_retention_manager import( +from hotkdump.core.folder_retention_manager import ( FolderRetentionManager, - FolderRetentionManagerSettings -) -from hotkdump.core.utils import ( - mktemppath, - switch_cwd + FolderRetentionManagerSettings, ) +from hotkdump.core.utils import mktemppath, switch_cwd @dataclass() # pylint: disable-next=too-many-instance-attributes class HotkdumpParameters: """Parameters for hotkdump.""" + dump_file_path: str internal_case_number: str = None interactive: bool = False @@ -60,13 +60,14 @@ class HotkdumpParameters: log_file_path: str = mktemppath("hotkdump.log") ddebs_folder_path: str = mktemppath("hotkdump", "ddebs") ddeb_retention_settings: FolderRetentionManagerSettings = field( - default_factory=lambda : FolderRetentionManagerSettings( - enabled = True, - size_hwm = (1<<30) * 10, # 10GiB, - size_lwm = (1<<30) * 2, # 2GiB, - max_age_secs = 86400 * 15, # 15 days - max_count = 5 - )) + default_factory=lambda: FolderRetentionManagerSettings( + enabled=True, + size_hwm=(1 << 30) * 10, # 10GiB, + size_lwm=(1 << 30) * 2, # 2GiB, + max_age_secs=86400 * 15, # 15 days + max_count=5, + ) + ) print_vmcoreinfo_fields: list = None no_debuginfod: bool = False no_pullpkg: bool = False @@ -81,9 +82,9 @@ def validate_sanity(self): self.ddeb_retention_settings.validate_sanity() + class Hotkdump: - """the hotkdump class implementation. 
- """ + """the hotkdump class implementation.""" def __init__(self, parameters: HotkdumpParameters): """initialize a new hotkdump instance @@ -105,9 +106,11 @@ def __init__(self, parameters: HotkdumpParameters): self.touch_file(self.params.output_file_path) tstamp_now = datetime.now().strftime("%d/%m/%Y %H:%M:%S") - vmcore_filename = self.params.dump_file_path.rsplit('/', 1)[-1] + vmcore_filename = self.params.dump_file_path.rsplit("/", 1)[-1] with open(self.params.output_file_path, "w", encoding="utf-8") as outfile: - outfile.write(f"{tstamp_now}: processing {vmcore_filename} (CASE# {self.params.internal_case_number})\n") + outfile.write( + f"{tstamp_now}: processing {vmcore_filename} (CASE# {self.params.internal_case_number})\n" + ) self.kdump_file = KdumpFile(self.params.dump_file_path) @@ -115,7 +118,9 @@ def __init__(self, parameters: HotkdumpParameters): # pylint: disable=consider-using-with self.temp_working_dir = tempfile.TemporaryDirectory() logging.debug( - "created %s temporary directory for the intermediary files", self.temp_working_dir.name) + "created %s temporary directory for the intermediary files", + self.temp_working_dir.name, + ) # Create the ddeb path if not exists os.makedirs(self.params.ddebs_folder_path, exist_ok=True) @@ -138,7 +143,8 @@ def get_architecture(self): # FIXME(mkg): Add other architectures as well raise NotImplementedError( - f"Machine architecture {self.kdump_file.ddhdr.utsname.machine} not recognized!") + f"Machine architecture {self.kdump_file.ddhdr.utsname.machine} not recognized!" + ) @staticmethod def find_debuginfod_find_executable(): @@ -153,22 +159,23 @@ def find_crash_executable(): str: /crash if script path contains a `crash` symlink str: result of `which crash` otherwise """ - crash_symlink_path = os.path.dirname( - os.path.realpath(__file__)) + "/../crash" - crash = crash_symlink_path if os.path.exists( - crash_symlink_path) else shutil.which("crash") + crash_symlink_path = os.path.dirname(os.path.realpath(__file__)) + "/../crash" + crash = ( + crash_symlink_path + if os.path.exists(crash_symlink_path) + else shutil.which("crash") + ) if crash is None: raise ExceptionWithLog("Could not find the `crash` executable!") return crash def initialize_logging(self): - """Initialize logging for hotkdump - """ + """Initialize logging for hotkdump""" self.logger = logging.getLogger() file_logger = logging.FileHandler(filename=self.params.log_file_path) console_logger = logging.StreamHandler(sys.stdout) # Allow log level overrides from environment - level = os.environ.get('HOTKDUMP_LOGLEVEL', 'INFO').upper() + level = os.environ.get("HOTKDUMP_LOGLEVEL", "INFO").upper() for logger in (file_logger, console_logger, self.logger): logger.setLevel(level) @@ -180,7 +187,8 @@ def initialize_logging(self): for handler in ubuntutools_GetLogger().handlers: handler.addFilter( # pylint: disable=magic-value-comparison - lambda r: "Downloading" in r.msg or r.levelno >= logging.ERROR) + lambda r: "Downloading" in r.msg or r.levelno >= logging.ERROR + ) @staticmethod def touch_file(fname): @@ -197,7 +205,8 @@ def write_crash_commands_file(self): warnings.filterwarnings("ignore", category=DeprecationWarning) # Read & render the template jinja_template_content = read_text( - "hotkdump.templates", "crash_commands.jinja") + "hotkdump.templates", "crash_commands.jinja" + ) template = Template(jinja_template_content) rendered_content = template.render( @@ -208,7 +217,10 @@ def write_crash_commands_file(self): final_cmdfile_contents = 
textwrap.dedent(rendered_content).strip() ccfile.write(final_cmdfile_contents) logging.debug( - "command file %s rendered with contents: %s", commands_file, final_cmdfile_contents) + "command file %s rendered with contents: %s", + commands_file, + final_cmdfile_contents, + ) return ccfile.name @staticmethod @@ -239,26 +251,50 @@ def strip_release_variant_tags(value): str: Version string without release variant tags """ # see: https://ubuntu.com/kernel/variants#version-specific-kernels - tags = sorted(["generic", "lowlatency", "generic-hwe", - "lowlatency-hwe", "kvm", "aws", "azure", "azure-fde", - "gcp", "gke", "snapdragon", "raspi2"], key=len, reverse=True) - version_specific_tags = sorted([ - "generic-hwe-{}", "generic-hwe-{}", "lowlatency-hwe-{}", "lowlatency-hwe-{}"], key=len, reverse=True) + tags = sorted( + [ + "generic", + "lowlatency", + "generic-hwe", + "lowlatency-hwe", + "kvm", + "aws", + "azure", + "azure-fde", + "gcp", + "gke", + "snapdragon", + "raspi2", + ], + key=len, + reverse=True, + ) + version_specific_tags = sorted( + [ + "generic-hwe-{}", + "generic-hwe-{}", + "lowlatency-hwe-{}", + "lowlatency-hwe-{}", + ], + key=len, + reverse=True, + ) versions = ["16.04", "18.04", "20.04", "22.04", "24.04"] for vtag in version_specific_tags: for version in versions: - value = value.replace("-" + vtag.format(version) + '-edge', '') - value = value.replace("-" + vtag.format(version), '') + value = value.replace("-" + vtag.format(version) + "-edge", "") + value = value.replace("-" + vtag.format(version), "") for tag in tags: - value = value.replace(f"-{tag}", '') - value = value.replace(f"-{tag}-edge", '') + value = value.replace(f"-{tag}", "") + value = value.replace(f"-{tag}-edge", "") validator_regex = re.compile(r"^\d+\.\d+\.\d+-\d+$") if not validator_regex.match(value): raise ExceptionWithLog( - f"The stripped release did not yield a valid version! ({value})") + f"The stripped release did not yield a valid version! ({value})" + ) return value @@ -273,9 +309,15 @@ def _digest_debuginfod_find_output(self, line): name, content = http_match.groups() log_fns = { - "size": lambda : logging.info("debuginfod-find: vmlinux size: %s", pretty_size(int(content))), - "archive": lambda : logging.info("debuginfod-find: `.ddeb` file name: %s", content), - "file": lambda : logging.info("debuginfod-find: vmlinux file name: %s", content) + "size": lambda: logging.info( + "debuginfod-find: vmlinux size: %s", pretty_size(int(content)) + ), + "archive": lambda: logging.info( + "debuginfod-find: `.ddeb` file name: %s", content + ), + "file": lambda: logging.info( + "debuginfod-find: vmlinux file name: %s", content + ), } if name in log_fns: @@ -287,8 +329,10 @@ def _digest_debuginfod_find_output(self, line): if not self.debuginfod_find_progress: # In order to be consistent,.we're using the same # progress bar that PullPkg uses. 
- self.debuginfod_find_progress = _StderrProgressBar(os.get_terminal_size(sys.stderr.fileno()).columns) - current, maximum = [int(v) for v in progress_match.groups()] + self.debuginfod_find_progress = _StderrProgressBar( + os.get_terminal_size(sys.stderr.fileno()).columns + ) + current, maximum = [int(v) for v in progress_match.groups()] if maximum > 0: pct = int((current / maximum) * 100) self.debuginfod_find_progress.update(pct, 100) @@ -304,7 +348,9 @@ def maybe_download_vmlinux_via_debuginfod(self): build_id = self.kdump_file.vmcoreinfo.get("BUILD-ID") if not build_id: - logging.info("cannot use debuginfod-find - BUILD-ID not found in vmcoreinfo!") + logging.info( + "cannot use debuginfod-find - BUILD-ID not found in vmcoreinfo!" + ) return None debuginfod_find_args = f"-vvv debuginfo {build_id}" @@ -332,7 +378,11 @@ def maybe_download_vmlinux_via_debuginfod(self): logging.info("debuginfod-find: succeeded, vmcore path: `%s`", line) return line - logging.info("debuginfod-find: download for BUILD-ID `%s` failed with `%s`", build_id, line) + logging.info( + "debuginfod-find: download for BUILD-ID `%s` failed with `%s`", + build_id, + line, + ) return None finally: self.debuginfod_find_progress = None @@ -351,7 +401,7 @@ def maybe_download_vmlinux_via_pullpkg(self): self.kdump_file.ddhdr.utsname.release, self.strip_release_variant_tags(self.kdump_file.ddhdr.utsname.release), self.kdump_file.ddhdr.utsname.normalized_version, - self.get_architecture() + self.get_architecture(), ) with switch_cwd(self.params.ddebs_folder_path): @@ -360,28 +410,36 @@ def maybe_download_vmlinux_via_pullpkg(self): # Already exists, do not download again # TODO(mkg): Verify SHA checksum? logging.info( - "The .ddeb file %s already exists, re-using it", expected_ddeb_path) + "The .ddeb file %s already exists, re-using it", expected_ddeb_path + ) # Ensure that the file's last access time is updated os.utime(expected_ddeb_path, (time.time(), time.time())) return expected_ddeb_path logging.info( "Downloading `vmlinux` image for kernel version %s, please be patient...", - self.kdump_file.ddhdr.utsname.release) + self.kdump_file.ddhdr.utsname.release, + ) # (mkg): To force pull-lp-ddebs to use launchpadlibrarian.net for download # pass an empty mirror list env variable to the hotkdump, e.g.: # UBUNTUTOOLS_UBUNTU_DDEBS_MIRROR= python3 hotkdump.py -c 123 -d dump.dump - pull_args = ["--distro", "ubuntu", "--arch", self.get_architecture(), "--pull", "ddebs", - f"linux-image-unsigned-{self.kdump_file.ddhdr.utsname.release}", - f"{self.strip_release_variant_tags(self.kdump_file.ddhdr.utsname.release)}" - f".{self.kdump_file.ddhdr.utsname.normalized_version}"] + pull_args = [ + "--distro", + "ubuntu", + "--arch", + self.get_architecture(), + "--pull", + "ddebs", + f"linux-image-unsigned-{self.kdump_file.ddhdr.utsname.release}", + f"{self.strip_release_variant_tags(self.kdump_file.ddhdr.utsname.release)}" + f".{self.kdump_file.ddhdr.utsname.normalized_version}", + ] logging.info("Invoking PullPkg().pull with %s", str(pull_args)) PullPkg().pull(pull_args) if not os.path.exists(expected_ddeb_path): - raise ExceptionWithLog( - f"failed to download {expected_ddeb_path}") + raise ExceptionWithLog(f"failed to download {expected_ddeb_path}") return expected_ddeb_path @@ -397,33 +455,48 @@ def extract_vmlinux_ddeb(self, ddeb_file): ddeb_extract_dst = f"{self.temp_working_dir.name}/ddeb-root" dpkg_deb_args = f"-x {ddeb_file} {ddeb_extract_dst}" logging.info( - "Extracting %s to %s, please be patient...", ddeb_file, ddeb_extract_dst) + 
"Extracting %s to %s, please be patient...", ddeb_file, ddeb_extract_dst + ) with switch_cwd(self.params.ddebs_folder_path): result = self.exec("dpkg", dpkg_deb_args) if result.returncode != 0: raise ExceptionWithLog( - f"failed to extract {ddeb_file}: {result.stderr.readlines()}") + f"failed to extract {ddeb_file}: {result.stderr.readlines()}" + ) - return self.temp_working_dir.name + \ - f"/ddeb-root/usr/lib/debug/boot/vmlinux-{self.kdump_file.ddhdr.utsname.release}" + return ( + self.temp_working_dir.name + + f"/ddeb-root/usr/lib/debug/boot/vmlinux-{self.kdump_file.ddhdr.utsname.release}" + ) - def summarize_vmcore_file(self, vmlinux_path:str): - """Print a summary of the vmcore file to the output file - """ - logging.info("Loading `vmcore` file %s into `crash`, please wait..", self.params.dump_file_path) + def summarize_vmcore_file(self, vmlinux_path: str): + """Print a summary of the vmcore file to the output file""" + logging.info( + "Loading `vmcore` file %s into `crash`, please wait..", + self.params.dump_file_path, + ) commands_file_path = self.write_crash_commands_file() - self.exec(self.crash_executable, - f"-x -i {commands_file_path} -s {self.params.dump_file_path} {vmlinux_path}") - logging.info("See %s for logs, %s for outputs", self.params.log_file_path, self.params.output_file_path) + self.exec( + self.crash_executable, + f"-x -i {commands_file_path} -s {self.params.dump_file_path} {vmlinux_path}", + ) + logging.info( + "See %s for logs, %s for outputs", + self.params.log_file_path, + self.params.output_file_path, + ) - def launch_crash(self, vmlinux_path:str): + def launch_crash(self, vmlinux_path: str): """Launch the `crash` application with the user-given vmcore and downloaded vmlinux image file """ logging.info( - "Loading `vmcore` file %s into `crash`, please wait..", self.params.dump_file_path) - self.exec(self.crash_executable, - f"-x {self.params.dump_file_path} {vmlinux_path}") + "Loading `vmcore` file %s into `crash`, please wait..", + self.params.dump_file_path, + ) + self.exec( + self.crash_executable, f"-x {self.params.dump_file_path} {vmlinux_path}" + ) def run(self): """Run hotkdump main routine.""" @@ -459,8 +532,9 @@ def run(self): self.post_run() def post_run(self): - """Perform post-run tasks - """ - retention_mgr = FolderRetentionManager([self.params.ddebs_folder_path], lambda file : file.endswith(".ddeb")) + """Perform post-run tasks""" + retention_mgr = FolderRetentionManager( + [self.params.ddebs_folder_path], lambda file: file.endswith(".ddeb") + ) retention_mgr.load_policies_from_settings(self.params.ddeb_retention_settings) retention_mgr.execute_policies() diff --git a/hotkdump/core/kdumpfile.py b/hotkdump/core/kdumpfile.py index 323ab60..1c21434 100644 --- a/hotkdump/core/kdumpfile.py +++ b/hotkdump/core/kdumpfile.py @@ -346,7 +346,6 @@ def vmcoreinfo(self): return self._vmcoreinfo def __str__(self) -> str: - return " | ".join( # Get all attributes, filter out the built-in ones # and stringify the rest in "name:value" format @@ -393,7 +392,6 @@ def parse_flattened(self, fd): # Fetch the next chunk. mdhdr = mdhdr.next(fd) while mdhdr: - # If the current chunk offset is the offset for kdump_sub_header, # parse it. 
if mdhdr.offset == kdump_sub_header_off: @@ -447,7 +445,5 @@ def parse_compressed(self, fd): # Parse vmcoreinfo self._vmcoreinfo = VMCoreInfo.from_fd( - fd, - self.ksubhdr.offset_vmcoreinfo, - self.ksubhdr.size_vmcoreinfo + fd, self.ksubhdr.offset_vmcoreinfo, self.ksubhdr.size_vmcoreinfo ) diff --git a/hotkdump/core/utils.py b/hotkdump/core/utils.py index bb3f28e..adf3440 100644 --- a/hotkdump/core/utils.py +++ b/hotkdump/core/utils.py @@ -3,24 +3,24 @@ # Copyright 2023 Canonical Limited. # SPDX-License-Identifier: GPL-3.0 -"""`hotkdump` helper utilities. -""" +"""`hotkdump` helper utilities.""" import os import tempfile import contextlib + def pretty_size(amount_bytes): """Get human-readable file sizes. simplified version of https://pypi.python.org/pypi/hurry.filesize/ """ units_mapping = [ - (1<<50, 'PB'), - (1<<40, 'TB'), - (1<<30, 'GB'), - (1<<20, 'MB'), - (1<<10, 'KB'), - (1, ('byte', 'bytes')), + (1 << 50, "PB"), + (1 << 40, "TB"), + (1 << 30, "GB"), + (1 << 20, "MB"), + (1 << 10, "KB"), + (1, ("byte", "bytes")), ] for factor, suffix in units_mapping: if amount_bytes < factor: @@ -34,10 +34,12 @@ def pretty_size(amount_bytes): suffix = multiple return f"{amount:.2f} {suffix}" + def mktemppath(*args): """Create a path to the system's temp directory.""" return os.path.join(tempfile.gettempdir(), *args) + @contextlib.contextmanager def switch_cwd(wd): """Save current working directory and temporarily diff --git a/hotkdump/main.py b/hotkdump/main.py index 4b2d67d..0cb3fe4 100755 --- a/hotkdump/main.py +++ b/hotkdump/main.py @@ -3,8 +3,7 @@ # Copyright 2023 Canonical Limited. # SPDX-License-Identifier: GPL-3.0 -""" `hotkdump` CLI entry point. -""" +"""`hotkdump` CLI entry point.""" import sys import os @@ -21,8 +20,8 @@ # These are actually need to be run after the sys.path is updated, so # we're silencing the warning. 
# pylint: disable=wrong-import-position -from hotkdump.core.hotkdump import Hotkdump, HotkdumpParameters # noqa: E402 -from hotkdump.core.exceptions import NotAKernelCrashDumpException # noqa: E402 +from hotkdump.core.hotkdump import Hotkdump, HotkdumpParameters # noqa: E402 +from hotkdump.core.exceptions import NotAKernelCrashDumpException # noqa: E402 def main(): @@ -35,7 +34,7 @@ def main(): "--dump-file-path", help="Path to the Linux kernel crash dump", required=True, - default=argparse.SUPPRESS + default=argparse.SUPPRESS, ) ap.add_argument( "-c", @@ -68,7 +67,7 @@ def main(): required=False, help="Read and print the specified VMCOREINFO fields from the given kernel crash dump, then exit.", nargs="*", - default=argparse.SUPPRESS + default=argparse.SUPPRESS, ) download_methods_group = ap.add_mutually_exclusive_group() download_methods_group.add_argument( diff --git a/pyproject.toml b/pyproject.toml index 7ccf205..0696762 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -165,3 +165,6 @@ notes = '''FIXME, # List of names allowed to shadow builtins allowed-redefined-builtins='' ######################################### + +[tool.ruff] +target-version = "py37" diff --git a/tests/test_hotkdump.py b/tests/test_hotkdump.py index 347a5c6..186dc6b 100644 --- a/tests/test_hotkdump.py +++ b/tests/test_hotkdump.py @@ -13,12 +13,7 @@ from hotkdump.core.hotkdump import Hotkdump, HotkdumpParameters, ExceptionWithLog -from tests.utils import ( - assert_has_no_such_calls, - MockFileCtx, - MockStatObj, - MOCK_HDR -) +from tests.utils import assert_has_no_such_calls, MockFileCtx, MockStatObj, MOCK_HDR mock.Mock.assert_has_no_such_calls = assert_has_no_such_calls @@ -40,7 +35,7 @@ class HotkdumpTest(TestCase): """test hotkdump class public api""" def setUp(self): - self.patcher = mock.patch('tempfile.TemporaryDirectory') + self.patcher = mock.patch("tempfile.TemporaryDirectory") self.mock_temp_dir = self.patcher.start() def tearDown(self): @@ -583,7 +578,6 @@ def test_post_run_ddeb_retention_disabled( files post-run when the file retention is disabled. """ with mock.patch("os.remove") as mock_remove: - params = HotkdumpParameters(dump_file_path="empty") params.ddebs_folder_path = "/path/to/ddebs" params.ddeb_retention_settings.enabled = False diff --git a/tests/test_kdump_file_header.py b/tests/test_kdump_file_header.py index 4122685..103a4a6 100644 --- a/tests/test_kdump_file_header.py +++ b/tests/test_kdump_file_header.py @@ -139,7 +139,6 @@ def test_from_bytes_io(self, mfile): class TestKdumpSubHeader(TestCase): - def setUp(self): self.fake_file_content = ( b"\x01\x00\x00\x00\x00\x00\x00\x00" # phys_base @@ -210,7 +209,6 @@ class TestKdumpFile(TestCase): @mock.patch("builtins.open", new_callable=mock.mock_open, read_data=b"") def test_init_valid_kdump_file_flattened(self, mfile): - fake_vmcoreinfo = b"""key=value this is a key=value value $$=@@ @@ -393,7 +391,9 @@ def test_init_invalid_signature(self, mfile): with self.assertRaises(NotAKernelCrashDumpException): _ = KdumpFile("dummy_path") - @mock.patch("builtins.open", MockFileCtx(file_bytes=MOCK_HDR_INVALID_NO_SIG, name="name")) + @mock.patch( + "builtins.open", MockFileCtx(file_bytes=MOCK_HDR_INVALID_NO_SIG, name="name") + ) def test_kdump_hdr_no_sig(self): """Test kdump file header parsing with garbage input. diff --git a/tests/utils.py b/tests/utils.py index e20556d..1525975 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 -"""Test utility/helper types. 
-""" +"""Test utility/helper types.""" # Copyright 2023 Canonical Limited. # SPDX-License-Identifier: GPL-3.0 @@ -9,7 +8,7 @@ import io -def pad(by, n, pad_chr = b"\0"): +def pad(by, n, pad_chr=b"\0"): """Fill the remaning space of a bytes array with zeros.""" by += pad_chr * (n - len(by)) return by @@ -48,6 +47,7 @@ def assert_has_no_such_calls(self, *args, **kwargs): class IOAdapter(io.BytesIO): """Mock BytesIO class.""" + def write(self, value): if isinstance(value, str): return super().write(value.encode()) @@ -67,6 +67,7 @@ def __call__(self, *args, **kwargs): class MockFileObject(IOAdapter): """Mock file object.""" + def __init__(self, file_bytes, name): super().__init__(file_bytes) self.name = name @@ -101,6 +102,7 @@ def write(self, *args, **kwargs): class MockStatObj: """Poor man's stat object.""" + def __init__(self, name, mock_data) -> None: self.name = name self.mock_data = mock_data diff --git a/tox.ini b/tox.ini index 376bedc..b8287b8 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,7 @@ [tox] -env_list = py{36,37,38,39,310,311,312},pylint,ruff +env_list = py{36,37,38,39,310,311,312},pylint,ruff-check,ruff-format,ruff-format-check skipsdist = true +isolated_build=true [testenv] unit_tests = {toxinidir}/tests/ @@ -22,7 +23,7 @@ deps = # Note that this is awkwardly installs the package # itself and not only [optional-dependencies.testing]. # see: https://github.com/pypa/pip/issues/11440 - py{36,37,38,39,310,311,312},pylint,ruff: .[testing] # Install & test dependencies + py{36,37,38,39,310,311,312},pylint,ruff-check,ruff-format-check,ruff-format: .[testing] # Install & test dependencies commands = py36: pip3 install toml py36: bash -c "pip3 install $(python extras/py36-all-requirements.py)" @@ -34,8 +35,23 @@ commands = # It's fine for unit tests to not have docstrings. Also, pytest does not like static test cases. pylint --recursive=y -v {posargs:{[testenv]unit_tests}} --disable=missing-function-docstring,missing-class-docstring,no-self-use -[testenv:ruff] +[testenv:ruff-check] +# ruff is not compatible with py36 atm. +basepython = py37,py38,py39,py310,py311,py312 commands = - # ruff is not compatible with py36 atm. - py{37,38,39,310,311,312}: ruff check {posargs:{[testenv]module_files}} - py{37,38,39,310,311,312}: ruff check {posargs:{[testenv]unit_tests}} \ No newline at end of file + ruff check {posargs:{[testenv]module_files}} + ruff check {posargs:{[testenv]unit_tests}} + +[testenv:ruff-format] +# ruff is not compatible with py36 atm. +basepython = py37,py38,py39,py310,py311,py312 +commands = + ruff format --target-version=py37 {posargs:{[testenv]module_files}} + ruff format --target-version=py37 {posargs:{[testenv]unit_tests}} + +[testenv:ruff-format-check] +# ruff is not compatible with py36 atm. +basepython = py37,py38,py39,py310,py311,py312 +commands = + ruff format --target-version=py37 --diff {posargs:{[testenv]module_files}} + ruff format --target-version=py37 --diff {posargs:{[testenv]unit_tests}} \ No newline at end of file