From 3e2a1db334b8a0fef676147ac590db41f08100a2 Mon Sep 17 00:00:00 2001
From: teutoburg
Date: Wed, 27 Nov 2024 14:17:07 +0100
Subject: [PATCH 01/19] Fix glob pattern for compiled packages

---
 irdb/publish.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/irdb/publish.py b/irdb/publish.py
index acb10da6..e7fafda4 100644
--- a/irdb/publish.py
+++ b/irdb/publish.py
@@ -154,9 +154,13 @@ def zip_package_folder(pkg_name: str, zip_name: str) -> Path:
 
 
 def _get_local_path(pkg_name: str, stable: bool) -> Path:
+    # TODO: add support for additional same day versions
+    pattern = f"{pkg_name}.*{'' if stable else '.dev'}.zip"
     try:
-        zipped_versions = (path for path in ZIPPED_DIR.glob(f"{pkg_name}*.zip")
-                           if _is_stable(path.stem) == stable)
+        zipped_versions = (
+            path for path in ZIPPED_DIR.glob(pattern)
+            if _is_stable(path.stem) == stable
+        )
         local_path = max(zipped_versions, key=lambda path: path.stem)
     except ValueError as err:
         raise ValueError(f"No compiled version of '{pkg_name}' found for "

From d456542d25e3f70c80ce4d6116d888062663a78a Mon Sep 17 00:00:00 2001
From: teutoburg
Date: Fri, 22 Aug 2025 12:46:26 +0200
Subject: [PATCH 02/19] Docstrings, formatting, and some rephrasing

Update test case
---
 irdb/publish.py            | 219 +++++++++++++++++++++++--------------
 irdb/tests/test_publish.py |   2 +-
 2 files changed, 138 insertions(+), 83 deletions(-)

diff --git a/irdb/publish.py b/irdb/publish.py
index e7fafda4..055557bb 100644
--- a/irdb/publish.py
+++ b/irdb/publish.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-"""Publish and upload irdb packages"""
+"""Publish and upload irdb packages."""
 
 import argparse
 import logging
@@ -31,6 +31,7 @@
 
 class Password:
     """Used for secure pwd promt."""
+
     DEFAULT = "Prompt if not specified"
 
     def __init__(self, value):
@@ -47,7 +48,7 @@ def __eq__(self, other):
 def publish(pkg_names=None, compilezip=False, upload=True,
             login=None, password=None, update_version=True):
     """
-    Should be as easy as just calling this function to republish all packages
+    Should be as easy as just calling this function to republish all packages.
 
     Parameters
     ----------
@@ -77,7 +78,7 @@ def make_package(pkg_name: str, stable: bool = False,
                  keep_version: bool = False) -> str:
     """
-    Makes a package (todo: update this description!)
+    Make a package (todo: update this description!).
 
    By default, make_package updates the version to today. `keep_version`
    can be set to True in order to use the existing version.
This is a step towards @@ -109,9 +110,11 @@ def make_package(pkg_name: str, stable: bool = False, if not keep_version: # Collect the info for the version.yaml file time = dt.now(UTC) - version_dict = {"version": f"{time.date()}{suffix}", - "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"), - "release": "stable" if stable else "dev"} + version_dict = { + "version": f"{time.date()}{suffix}", + "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"), + "release": "stable" if stable else "dev", + } # Add a version.yaml file to the package pkg_version_path.write_text(yaml.dump(version_dict), encoding="utf-8") @@ -124,16 +127,18 @@ def make_package(pkg_name: str, stable: bool = False, # Make the zip file zip_name = f"{pkg_name}.{time.date()}{suffix}.zip" zip_package_folder(pkg_name, zip_name) - logging.info("[%s]: Compiled package: %s", - dt.now().strftime("%Y-%m-%d %H:%M:%S"), - zip_name.strip(".zip")) + logging.info( + "[%s]: Compiled package: %s", + dt.now().strftime("%Y-%m-%d %H:%M:%S"), + zip_name.strip(".zip"), + ) return zip_name def zip_package_folder(pkg_name: str, zip_name: str) -> Path: """ - Create a zip file of packages in `pkg_names` + Create a zip file of packages in `pkg_names`. Directories `__pycache__` and hidden files (starting with `.`) are ignored. @@ -152,7 +157,6 @@ def zip_package_folder(pkg_name: str, zip_name: str) -> Path: return zip_pkg_path - def _get_local_path(pkg_name: str, stable: bool) -> Path: # TODO: add support for additional same day versions pattern = f"{pkg_name}.*{'' if stable else '.dev'}.zip" @@ -163,22 +167,29 @@ def _get_local_path(pkg_name: str, stable: bool) -> Path: ) local_path = max(zipped_versions, key=lambda path: path.stem) except ValueError as err: - raise ValueError(f"No compiled version of '{pkg_name}' found for " - f"condition '{stable=}'.") from err + raise ValueError( + f"No compiled version of '{pkg_name}' found for " + f"condition '{stable=}'." + ) from err return local_path def _handle_missing_folder(pkg_name: str): print(f"No server folder specified for package '{pkg_name}'.") - proceed = input("Do you want to add a server folder now? Upload will be " - f"aborted otherwise. Also check spelling for '{pkg_name}' " - "before proceeding! (y)/n: ") + proceed = input( + "Do you want to add a server folder now? Upload will be aborted " + f"otherwise. Also check spelling for '{pkg_name}' before proceeding!" + " (y)/n: " + ) if not proceed.lower() == "y": raise KeyboardInterrupt("Execution aborted by user.") - new_folder = input("Allowed values for server folder are: 'locations', " - "'telescopes' and 'instruments': ") - if new_folder not in {"locations", "telescopes", "instruments"}: + allowed = {"locations", "telescopes", "instruments"} + # TODO: py311 py312 Remove silly triple quotes... 
+ new_folder = input( + f"""Allowed values for server folder are: '{"', '".join(allowed)}': """ + ) + if new_folder not in allowed: raise ValueError("Invalid input.") with PATH_FOLDERS_YAML.open("r", encoding="utf-8") as file: @@ -196,11 +207,10 @@ def _get_server_path(pkg_name: str, local_name: str) -> str: with PATH_FOLDERS_YAML.open("r", encoding="utf-8") as file: folders = yaml.safe_load(file) try: - server_path = f"{folders[pkg_name]}/{local_name}" + folder = folders[pkg_name] except KeyError: - folders = _handle_missing_folder(pkg_name) - server_path = f"{folders[pkg_name]}/{local_name}" - return server_path + folder = _handle_missing_folder(pkg_name)[pkg_name] + return f"{folder}/{local_name}" def confirm(pkg_name: str) -> bool: @@ -210,18 +220,22 @@ def confirm(pkg_name: str) -> bool: except (KeyError, ValueError): current_stable = "" - proceed = input("This will supersede the current STABLE version " - f"({current_stable}) of '{pkg_name}' on the IRDB server. " - "The uploaded package will be set as the new default " - f"for '{pkg_name}'.\nAre you sure you want to continue?" - " (y)/n: ") + proceed = input( + f"This will supersede the current STABLE version ({current_stable}) of" + f" '{pkg_name}' on the IRDB server. The uploaded package will be set " + f"as the new default for '{pkg_name}'.\nAre you sure you want to " + "continue? (y)/n: " + ) return proceed.lower() == "y" -def push_to_server(pkg_name: str, stable: bool = False, - login: Optional[str] = None, - password: Optional[Password] = None, - no_confirm: bool = False) -> None: +def push_to_server( + pkg_name: str, + stable: bool = False, + login: Optional[str] = None, + password: Optional[Password] = None, + no_confirm: bool = False, +) -> None: """ Upload a package to the univie server. @@ -253,8 +267,11 @@ def push_to_server(pkg_name: str, stable: bool = False, local_path = _get_local_path(pkg_name, stable) server_path = _get_server_path(pkg_name, local_path.name) - if not local_path.stem.endswith("dev") and not no_confirm and \ - not confirm(pkg_name): + if ( + not local_path.stem.endswith("dev") + and not no_confirm + and not confirm(pkg_name) + ): return cnopts = pysftp.CnOpts() @@ -289,50 +306,81 @@ def push_packages_yaml_to_server(login, password): def main(): - """main CLI script""" - parser = argparse.ArgumentParser(prog="publish", - description=("Set a new version number, compile (zip) the specified " - "packages and (optionally) push them to the IRDB server. " - "This command must be run from the IRDB root directory.")) - - parser.add_argument("pkg_names", - nargs="+", - help="Name(s) of the package(s).") - parser.add_argument("-l", - dest="username", - required=True, - help=r"UniVie u:space username - e.g. u\kieranl14.") - parser.add_argument("-p", - dest="password", - type=Password, - default=Password.DEFAULT, - help=("UniVie u:space password. If left empty, a " - "secure prompt will appear, which is the " - "recommended usage. Supplying the password " - "directly via this argument is unsecure and " - "only included for script support.")) - parser.add_argument("-c", "--compile", - action="store_true", - help="Compile all files in a PKG folder to a .zip archive.") - parser.add_argument("-u", "--upload", - action="store_true", - help="Upload the package .zip archive to the server.") - parser.add_argument("-s", "--stable", - action="store_true", - help=("Build as a stable version. By default, a dev " - "version is created. 
Publishing a stable version " - "requires to set this option, and a manual " - "conformation will be asked from the user.")) - parser.add_argument("-k", "--keep-version", - action="store_true", - help=("Keep the current package version number. " - "By default, running this script will bump the " - "package version number (date) and timestamp. " - "Set this option to prevent that.")) - parser.add_argument("--no-confirm", - action="store_true", - help=("Don't ask for confirmation when uploading " - "stable package. Only for CI/CD use!")) + """Execute main CLI script.""" + parser = argparse.ArgumentParser( + prog="publish", + description=( + "Set a new version number, compile (zip) the specified packages " + "and (optionally) push them to the IRDB server. This command must " + "be run from the IRDB root directory." + ), + ) + + parser.add_argument( + "pkg_names", nargs="+", help="Name(s) of the package(s)." + ) + parser.add_argument( + "-l", + dest="username", + required=True, + help=r"UniVie u:space username - e.g. u\kieranl14.", + ) + parser.add_argument( + "-p", + dest="password", + type=Password, + default=Password.DEFAULT, + help=( + "UniVie u:space password. If left empty, a secure prompt will " + "appear, which is the recommended usage. Supplying the password " + "directly via this argument is unsecure and only included for " + "script support." + ), + ) + parser.add_argument( + "-c", + "--compile", + action="store_true", + help=( + "Compile all files in a PKG folder to a .zip archive. If not set, " + "the latest comiled version will be used (error if none is found)." + ), + ) + parser.add_argument( + "-u", + "--upload", + action="store_true", + help="Upload the package .zip archive to the server.", + ) + parser.add_argument( + "-s", + "--stable", + action="store_true", + help=( + "Build as a stable version. By default, a dev version is created. " + "Publishing a stable version requires to set this option, and a " + "manual conformation will be asked from the user." + ), + ) + parser.add_argument( + "-k", + "--keep-version", + action="store_true", + help=( + "Keep the current package version number as in the package's " + "version.yaml file. By default, running this script will bump the " + "package version number (date) and timestamp. Set this option to " + "prevent that." + ), + ) + parser.add_argument( + "--no-confirm", + action="store_true", + help=( + "Don't ask for confirmation when uploading stable package. Only " + "for CI/CD use!" + ), + ) args = parser.parse_args() if args.compile: @@ -340,11 +388,18 @@ def main(): make_package(pkg_name, args.stable, args.keep_version) if args.upload: for pkg_name in args.pkg_names: - push_to_server(pkg_name, args.stable, args.username, args.password, - args.no_confirm) + push_to_server( + pkg_name, + args.stable, + args.username, + args.password, + args.no_confirm, + ) if not args.compile and not args.upload: - logging.warning(("Neither `compile` nor `upload` was set. " - "No action will be performed.")) + logging.warning( + "Neither `compile` nor `upload` option was set. " + "No action will be performed." + ) if __name__ == "__main__": diff --git a/irdb/tests/test_publish.py b/irdb/tests/test_publish.py index b66eaf29..29198b04 100644 --- a/irdb/tests/test_publish.py +++ b/irdb/tests/test_publish.py @@ -228,7 +228,7 @@ def test_multiple_packages(default_argv): @pytest.mark.usefixtures("default_argv", "caplog") def test_warning_no_action(default_argv, caplog): - warnmsg = ("Neither `compile` nor `upload` was set. 
" + warnmsg = ("Neither `compile` nor `upload` option was set. " "No action will be performed.") with mock.patch("sys.argv", default_argv): pub.main() From edc0e96e028c19c1963060b50e9533d8a617f281 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 13:32:58 +0200 Subject: [PATCH 03/19] Fully deprecate old publish function --- irdb/publish.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/irdb/publish.py b/irdb/publish.py index 055557bb..04e69c8a 100644 --- a/irdb/publish.py +++ b/irdb/publish.py @@ -63,9 +63,8 @@ def publish(pkg_names=None, compilezip=False, upload=True, False: use version in /version.yaml See make_package(). """ - warn(("This function is only kept for backwards compatibility and might " - "be fully deprecated in the future."), - PendingDeprecationWarning, stacklevel=2) + warn(("This function is deprecated and will be removed by the end of 2025."), + FutureWarning, stacklevel=2) for pkg_name in pkg_names: if compilezip: make_package(pkg_name, From aa236ee5ea631ffcb78a1e11f4b16a716b336c34 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 13:34:04 +0200 Subject: [PATCH 04/19] Remove long-deprecated `push_packages_yaml_to_server()` --- irdb/publish.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/irdb/publish.py b/irdb/publish.py index 04e69c8a..514cf76c 100644 --- a/irdb/publish.py +++ b/irdb/publish.py @@ -288,22 +288,6 @@ def push_to_server( return -def push_packages_yaml_to_server(login, password): - """ - Sync the packages.yaml file on the server with the current local one - - Parameters - ---------- - login, password : str - Univie u:space username and password - - """ - warn(("ANY use of packages.yaml is deprecated. " - "No upload will be performed. " - "This function will be removed in the next major release."), - DeprecationWarning, stacklevel=2) - - def main(): """Execute main CLI script.""" parser = argparse.ArgumentParser( From 424ae3b10bea1a2e9cfe3412561502cfa4d43e4a Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 13:43:38 +0200 Subject: [PATCH 05/19] Replace pysftp with pure paramiko The pysftp package is abandoned and was always just a wrapper for paramiko anyway, which itself is still actively supported and does all we need. --- irdb/publish.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/irdb/publish.py b/irdb/publish.py index 514cf76c..6af41ab2 100644 --- a/irdb/publish.py +++ b/irdb/publish.py @@ -12,7 +12,7 @@ from zipfile import ZIP_DEFLATED, ZipFile import yaml -import pysftp +import paramiko try: from .publish_utils import _is_stable, get_stable, get_all_package_versions @@ -263,6 +263,7 @@ def push_to_server( if password is None: raise ValueError("Password is None. 
Check email for password") + hostname = "webspace-access.univie.ac.at" local_path = _get_local_path(pkg_name, stable) server_path = _get_server_path(pkg_name, local_path.name) @@ -273,16 +274,13 @@ def push_to_server( ): return - cnopts = pysftp.CnOpts() - cnopts.hostkeys = None - sftp = pysftp.Connection(host="webspace-access.univie.ac.at", - username=login, password=password.value, - cnopts=cnopts) + with paramiko.Transport((hostname, 22)) as transport: + transport.connect(username=login, password=password.value) + + with paramiko.SFTPClient.from_transport(transport) as sftp: + sftp.chdir("scopesimu68/html/InstPkgSvr/") + sftp.put(local_path, server_path, confirm=True) - with sftp.cd("scopesimu68/html/InstPkgSvr/"): - if sftp.exists(server_path): - sftp.remove(server_path) - sftp.put(local_path, server_path) now = dt.now().strftime("%Y-%m-%d %H:%M:%S") print(f"[{now}]: Pushed to server: {pkg_name}") return From bf12b58dca48eea629ee02bc546fac904f1e8117 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 13:45:59 +0200 Subject: [PATCH 06/19] Remove pysftp dependency and relax paramiko version --- irdb/tests/test_publish.py | 2 +- requirements.github_actions.txt | 3 +-- requirements.readthedocs.txt | 1 - setup.py | 3 +-- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/irdb/tests/test_publish.py b/irdb/tests/test_publish.py index 29198b04..d2e78cbd 100644 --- a/irdb/tests/test_publish.py +++ b/irdb/tests/test_publish.py @@ -158,7 +158,7 @@ def test_call_confirm(default_argv, temp_zipfiles, argv, called, response): mock.Mock(return_value=response)) as mock_confirm: with mock.patch("irdb.publish.ZIPPED_DIR", temp_zipfiles): # Catch exception raised by fake login credentials - authex = pub.pysftp.paramiko.ssh_exception.AuthenticationException + authex = pub.paramiko.ssh_exception.AuthenticationException if called and not response: # Should abort -> no authex raised diff --git a/requirements.github_actions.txt b/requirements.github_actions.txt index 2336bcd8..455220bc 100644 --- a/requirements.github_actions.txt +++ b/requirements.github_actions.txt @@ -3,8 +3,7 @@ numpy matplotlib astropy pyyaml -pysftp -paramiko<=3.5.1 +paramiko photutils scopesim scopesim_templates diff --git a/requirements.readthedocs.txt b/requirements.readthedocs.txt index aa3616e4..4343d8ef 100644 --- a/requirements.readthedocs.txt +++ b/requirements.readthedocs.txt @@ -7,7 +7,6 @@ docutils requests beautifulsoup4 pyyaml -pysftp # git+https://github.com/AstarVienna/ScopeSim.git@master_rtd scopesim diff --git a/setup.py b/setup.py index 2b65c212..dee36f82 100644 --- a/setup.py +++ b/setup.py @@ -38,8 +38,7 @@ def setup_package(): "beautifulsoup4>=4.4", "lxml[html_clean]", "pyyaml>5.1", - "pysftp", - "paramiko<=3.5.1", + "paramiko", "synphot>=0.1.3", "skycalc_ipy>=0.1.3", From c6892906c9c0b870683af7553c129c3e6ab6510f Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 13:52:35 +0200 Subject: [PATCH 07/19] Remove useless usefixtures --- irdb/tests/test_publish.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/irdb/tests/test_publish.py b/irdb/tests/test_publish.py index d2e78cbd..d6124bb0 100644 --- a/irdb/tests/test_publish.py +++ b/irdb/tests/test_publish.py @@ -77,7 +77,6 @@ def temp_zipfiles(tmp_path_factory): return tmpdir -@pytest.mark.usefixtures("temp_zipfiles") class TestGetLocalPath: def test_stable(self, temp_zipfiles): with mock.patch("irdb.publish.ZIPPED_DIR", temp_zipfiles): @@ -145,7 +144,6 @@ def fixture_default_argv(): @pytest.mark.webtest 
-@pytest.mark.usefixtures("default_argv", "temp_zipfiles") @pytest.mark.parametrize("argv, called, response", [ (["-u"], False, None), (["-u", "-s", "--no-confirm"], False, None), @@ -171,7 +169,6 @@ def test_call_confirm(default_argv, temp_zipfiles, argv, called, response): assert mock_confirm.called == called -@pytest.mark.usefixtures("default_argv") @pytest.mark.parametrize("argv, called, args", [ ([], False, {}), (["-c"], True, {"stable": False, "keep_version": False}), @@ -191,7 +188,6 @@ def test_make_package_called(default_argv, argv, called, args): mock_mkpkg.assert_not_called() -@pytest.mark.usefixtures("default_argv") @pytest.mark.parametrize("argv, called, args", [ ([], False, {}), (["-u"], True, {"stable": False, "no_confirm": False}), @@ -213,7 +209,6 @@ def test_push_to_server_called(default_argv, argv, called, args): mock_phsvr.assert_not_called() -@pytest.mark.usefixtures("default_argv") def test_multiple_packages(default_argv): argv = ["foo_package", "bar_package", "-c", "-u"] with mock.patch("sys.argv", default_argv + argv): @@ -226,7 +221,6 @@ def test_multiple_packages(default_argv): assert "bar_package" in mock_phsvr.call_args[0] -@pytest.mark.usefixtures("default_argv", "caplog") def test_warning_no_action(default_argv, caplog): warnmsg = ("Neither `compile` nor `upload` option was set. " "No action will be performed.") From 2d957c584b8ed1555d9d3f25e02c1226c2fce1f0 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 14:55:00 +0200 Subject: [PATCH 08/19] Use server utils functions from ScopeSim Those are more advanced anyway and the old publish_utils were initially copy-pasted from an older version there. We already imported ScopeSim anyway so this doesn't affect overall dependencies. --- irdb/publish.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/irdb/publish.py b/irdb/publish.py index 6af41ab2..dfe9ccec 100644 --- a/irdb/publish.py +++ b/irdb/publish.py @@ -14,10 +14,7 @@ import yaml import paramiko -try: - from .publish_utils import _is_stable, get_stable, get_all_package_versions -except ImportError: - from publish_utils import _is_stable, get_stable, get_all_package_versions +from scopesim.server import database as db # After 3.11, can just import UTC directly from datetime UTC = timezone.utc @@ -162,7 +159,7 @@ def _get_local_path(pkg_name: str, stable: bool) -> Path: try: zipped_versions = ( path for path in ZIPPED_DIR.glob(pattern) - if _is_stable(path.stem) == stable + if db._is_stable(path.stem) == stable ) local_path = max(zipped_versions, key=lambda path: path.stem) except ValueError as err: @@ -215,7 +212,7 @@ def _get_server_path(pkg_name: str, local_name: str) -> str: def confirm(pkg_name: str) -> bool: """Ask for explicit user confirmation before pushing stable package.""" try: - current_stable = get_stable(get_all_package_versions()[pkg_name]) + current_stable = db.get_stable(db.get_all_package_versions()[pkg_name]) except (KeyError, ValueError): current_stable = "" From 7d595401e5fba233f5cf8f4a376f5f3978694b2b Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 22 Aug 2025 15:01:42 +0200 Subject: [PATCH 09/19] Remove now-obsolete `publish-utils.py` # Conflicts: # irdb/publish_utils.py --- irdb/publish_utils.py | 147 ------------------------------------------ 1 file changed, 147 deletions(-) delete mode 100644 irdb/publish_utils.py diff --git a/irdb/publish_utils.py b/irdb/publish_utils.py deleted file mode 100644 index 0f4a48b7..00000000 --- a/irdb/publish_utils.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- 
coding: utf-8 -*- -""" -Copied directly from ScopeSim. This is obviously not ideal and should use some -form of common sub-package to deal with this... -""" -import re -import logging -from datetime import date -from typing import List, Tuple, Set, Dict -from collections.abc import Iterator, Iterable, Mapping - -from more_itertools import first, last, groupby_transform - -import requests -from requests.packages.urllib3.util.retry import Retry -from requests.adapters import HTTPAdapter -import bs4 - -from scopesim import rc -# from .download_utils import initiate_download, handle_download, handle_unzipping - -_GrpVerType = Mapping[str, Iterable[str]] -_GrpItrType = Iterator[Tuple[str, List[str]]] - - -HTTP_RETRY_CODES = [403, 404, 429, 500, 501, 502, 503] - - -class ServerError(Exception): - """Some error with the server or connection to the server.""" - - -def get_server_folder_contents(dir_name: str, - unique_str: str = ".zip$") -> Iterator[str]: - url = rc.__config__["!SIM.file.server_base_url"] + dir_name - - retry_strategy = Retry(total=2, - status_forcelist=HTTP_RETRY_CODES, - allowed_methods=["GET"]) - adapter = HTTPAdapter(max_retries=retry_strategy) - - try: - with requests.Session() as session: - session.mount("https://", adapter) - result = session.get(url).content - except (requests.exceptions.ConnectionError, - requests.exceptions.RetryError) as error: - logging.error(error) - raise ServerError("Cannot connect to server. " - f"Attempted URL was: {url}.") from error - except Exception as error: - logging.error(("Unhandled exception occured while accessing server." - "Attempted URL was: %s."), url) - logging.error(error) - raise error - - soup = bs4.BeautifulSoup(result, features="lxml") - hrefs = soup.find_all("a", href=True, string=re.compile(unique_str)) - pkgs = (href.string for href in hrefs) - - return pkgs - - -def _parse_raw_version(raw_version: str) -> str: - """Catch initial package version which has no date info - - Set initial package version to basically "minus infinity". 
- """ - if raw_version in ("", "zip"): - return str(date(1, 1, 1)) - return raw_version.strip(".zip") - - -def _parse_package_version(package: str) -> Tuple[str, str]: - p_name, p_version = package.split(".", maxsplit=1) - return p_name, _parse_raw_version(p_version) - - -def _is_stable(package_version: str) -> bool: - return not package_version.endswith("dev") - - -def get_stable(versions: Iterable[str]) -> str: - """Return the most recent stable (not "dev") version.""" - return max(version for version in versions if _is_stable(version)) - - -def group_package_versions(all_packages: Iterable[Tuple[str, str]]) -> _GrpItrType: - """Group different versions of packages by package name""" - version_groups = groupby_transform(sorted(all_packages), - keyfunc=first, - valuefunc=last, - reducefunc=list) - return version_groups - - -def crawl_server_dirs() -> Iterator[Tuple[str, Set[str]]]: - """Search all folders on server for .zip files""" - for dir_name in get_server_folder_contents("", "/"): - logging.info("Searching folder '%s'", dir_name) - try: - p_dir = get_server_folder_package_names(dir_name) - except ValueError as err: - logging.info(err) - continue - logging.info("Found packages %s.", p_dir) - yield dir_name, p_dir - - -def get_all_package_versions() -> Dict[str, List[str]]: - """Gather all versions for all packages present in any folder on server""" - grouped = {} - folders = list(dict(crawl_server_dirs()).keys()) - for dir_name in folders: - p_list = [_parse_package_version(package) for package - in get_server_folder_contents(dir_name)] - grouped.update(group_package_versions(p_list)) - return grouped - - -def get_server_folder_package_names(dir_name: str) -> Set[str]: - """ - Retrieve all unique package names present on server in `dir_name` folder. - - Parameters - ---------- - dir_name : str - Name of the folder on the server. - - Raises - ------ - ValueError - Raised if no valid packages are found in the given folder. - - Returns - ------- - package_names : set of str - Set of unique package names in `dir_name` folder. 
-
-    """
-    package_names = {package.split(".", maxsplit=1)[0] for package
-                     in get_server_folder_contents(dir_name)}
-
-    if not package_names:
-        raise ValueError(f"No packages found in directory \"{dir_name}\".")
-
-    return package_names

From bb9f963dfdc136d0a5ffa6a5d20fd8744393e163 Mon Sep 17 00:00:00 2001
From: teutoburg
Date: Fri, 22 Aug 2025 15:08:19 +0200
Subject: [PATCH 10/19] Rework dependencies

---
 requirements.github_actions.txt | 1 -
 requirements.readthedocs.txt    | 3 ---
 setup.py                        | 7 +------
 3 files changed, 1 insertion(+), 10 deletions(-)

diff --git a/requirements.github_actions.txt b/requirements.github_actions.txt
index 455220bc..02b9f7f3 100644
--- a/requirements.github_actions.txt
+++ b/requirements.github_actions.txt
@@ -4,7 +4,6 @@ matplotlib
 astropy
 pyyaml
 paramiko
-photutils
 scopesim
 scopesim_templates
 jupytext
diff --git a/requirements.readthedocs.txt b/requirements.readthedocs.txt
index 4343d8ef..3ba1d264 100644
--- a/requirements.readthedocs.txt
+++ b/requirements.readthedocs.txt
@@ -4,11 +4,8 @@ matplotlib
 astropy
 
 docutils
-requests
-beautifulsoup4
 pyyaml
 
-# git+https://github.com/AstarVienna/ScopeSim.git@master_rtd
 scopesim
 
 sphinx>=4.3.0
diff --git a/setup.py b/setup.py
index dee36f82..5c66b71f 100644
--- a/setup.py
+++ b/setup.py
@@ -34,15 +34,10 @@ def setup_package():
             "matplotlib>=1.5",
 
             "docutils",
-            "requests>=2.20",
-            "beautifulsoup4>=4.4",
-            "lxml[html_clean]",
             "pyyaml>5.1",
             "paramiko",
 
-            "synphot>=0.1.3",
-            "skycalc_ipy>=0.1.3",
-            "anisocado",
+            "scopesim>=0.10.1",
         ],
     )

From a5a916b84e71053fab90350e67a062ee585e1e0c Mon Sep 17 00:00:00 2001
From: teutoburg
Date: Fri, 22 Aug 2025 15:25:58 +0200
Subject: [PATCH 11/19] Use more directly from ScopeSim

---
 irdb/publish.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/irdb/publish.py b/irdb/publish.py
index dfe9ccec..5c5b8dae 100644
--- a/irdb/publish.py
+++ b/irdb/publish.py
@@ -101,13 +101,17 @@ def make_package(pkg_name: str, stable: bool = False,
         Name of the package's compiled zip file.
""" - suffix = ".dev" if not stable else "" + suffix = "dev" if not stable else None pkg_version_path = PKGS_DIR / pkg_name / "version.yaml" if not keep_version: # Collect the info for the version.yaml file time = dt.now(UTC) version_dict = { - "version": f"{time.date()}{suffix}", + "version": ( + f"{time.date()}.{suffix}" + if suffix is not None + else str(time.date()) + ), "timestamp": time.strftime("%Y-%m-%d %H:%M:%S"), "release": "stable" if stable else "dev", } @@ -121,7 +125,7 @@ def make_package(pkg_name: str, stable: bool = False, time = dt.fromisoformat(version_dict["timestamp"]) # Make the zip file - zip_name = f"{pkg_name}.{time.date()}{suffix}.zip" + zip_name = db._unparse_package_version(pkg_name, time.date(), suffix) zip_package_folder(pkg_name, zip_name) logging.info( "[%s]: Compiled package: %s", From db49f48ab3067b9a25f76dec673f0221ccb27ce4 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 11:59:27 +0100 Subject: [PATCH 12/19] Remove long-obsolete and unused `OLD_tests` --- irdb/tests/OLD_tests/__init__.py | 0 irdb/tests/OLD_tests/packages.yaml | 16 --- irdb/tests/OLD_tests/tst_ascii_readablitly.py | 129 ------------------ irdb/tests/OLD_tests/tst_psf_headers.py | 40 ------ 4 files changed, 185 deletions(-) delete mode 100644 irdb/tests/OLD_tests/__init__.py delete mode 100644 irdb/tests/OLD_tests/packages.yaml delete mode 100644 irdb/tests/OLD_tests/tst_ascii_readablitly.py delete mode 100644 irdb/tests/OLD_tests/tst_psf_headers.py diff --git a/irdb/tests/OLD_tests/__init__.py b/irdb/tests/OLD_tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/irdb/tests/OLD_tests/packages.yaml b/irdb/tests/OLD_tests/packages.yaml deleted file mode 100644 index fc49bb3b..00000000 --- a/irdb/tests/OLD_tests/packages.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Packages to be tested -ELT : - dir : ELT - ignore_files : [] - -METIS : - dir : METIS - ignore_files : [] - -MICADO : - dir : MICADO - ignore_files : [] - -test_package : - dir : test_package - ignore_files : [] diff --git a/irdb/tests/OLD_tests/tst_ascii_readablitly.py b/irdb/tests/OLD_tests/tst_ascii_readablitly.py deleted file mode 100644 index 817ce410..00000000 --- a/irdb/tests/OLD_tests/tst_ascii_readablitly.py +++ /dev/null @@ -1,129 +0,0 @@ -import os -import inspect -import warnings -from glob import glob - -import yaml -from astropy.io import ascii as ioascii -from astropy.table import Table - -# Tests for the ascii files: -# 1. Test astropy.io.ascii can return a table object for each file -# 2. Test that each ASCII header can be converted to a dictionary by pyYAML -# 3. 
Specific tests for each type of ASCII file - -cur_frame = os.path.dirname(inspect.getfile(inspect.currentframe())) -HOME = os.path.abspath(os.path.join(cur_frame, "../")) -REPORTS = os.path.abspath(os.path.join(HOME, "_REPORTS")) - -with open(os.path.join(HOME, "packages.yaml")) as f: - PKGS_DICT = yaml.full_load(f) - - -def get_ascii_files_in_package(pkg_dir): - if not os.path.exists(pkg_dir): - raise ValueError("{} doesn't exist".format(pkg_dir)) - - ascii_tags = [".dat", ".tbl"] - files = [] - for tag in ascii_tags: - files += glob(os.path.join(pkg_dir, "*"+tag)) - - return files - - -def convert_table_comments_to_dict(tbl): - - comments_dict = None - if "comments" in tbl.meta: - try: - comments_dict = yaml.full_load("\n".join(tbl.meta["comments"])) - except: - warnings.warn("Couldn't convert .meta['comments'] to dict") - comments_dict = tbl.meta["comments"] - else: - warnings.warn("No comments in table") - - return comments_dict - - -def write_report(filename, dic, tag): - passing_url = "[![](https://img.shields.io/badge/{}-passing-green.svg)]()" - failing_url = "[![](https://img.shields.io/badge/{}-failing-red.svg)]()" - - with open(os.path.join(REPORTS, filename), "w") as f: - f.write("# REPORT : {} \n\n".format(tag.replace("_", " "))) - - for pkg, files in dic.items(): - f.write("# ``{}`` package\n\n".format(pkg)) - if len(files) > 0: - f.write(failing_url.format(tag) + "\n\n") - f.write("The following files have headers which are not in the " - "YAML format: \n\n") - for file in files: - f.write("- ``{}``\n".format(file)) - else: - f.write(passing_url.format(tag) + "\n\n") - f.write("All ASCII file headers are in the YAML format\n\n") - f.write("\n\n") - - -def test_all_ascii_files_readable_by_astropy_io_ascii(): - - tbl_failed_dict = {} - meta_failed_dict = {} - colname_failed_dict = {} - - for pkg in PKGS_DICT: - - tbl_passed = [] - tbl_failed = [] - - meta_passed = [] - meta_failed = [] - - colname_passed = [] - colname_failed = [] - - test_dir = PKGS_DICT[pkg]["dir"] - test_files = get_ascii_files_in_package(os.path.join(HOME, test_dir)) - - for file in test_files: - tbl = ioascii.read(file, fast_reader=False) - print(file) - if isinstance(tbl, Table): - tbl_passed += [file] - else: - tbl_failed += [file] - - meta = convert_table_comments_to_dict(tbl) - if isinstance(meta, dict): - meta_passed += [file] - else: - meta_failed += [file] - - if tbl.colnames[0] != "col0": - colname_passed += [file] - else: - colname_failed += [file] - - tbl_failed_dict[pkg] = tbl_failed - meta_failed_dict[pkg] = meta_failed - colname_failed_dict[pkg] = colname_failed - - write_report("failed_ascii_table.md", tbl_failed_dict, "ASCII_table_format") - write_report("failed_ascii_meta.md", meta_failed_dict, "ASCII_meta_format") - write_report("failed_ascii_colnames.md", meta_failed_dict, "ASCII_colnames") - - print("Tables failing to be read") - print(tbl_failed_dict) - print("Meta data failing to be read") - print(meta_failed_dict) - print("Table column names failing to be read") - print(colname_failed_dict) - - for pkg in tbl_failed_dict: - assert len(tbl_failed_dict[pkg]) == 0 - - for pkg in meta_failed_dict: - assert len(meta_failed_dict[pkg]) == 0 diff --git a/irdb/tests/OLD_tests/tst_psf_headers.py b/irdb/tests/OLD_tests/tst_psf_headers.py deleted file mode 100644 index c1a86749..00000000 --- a/irdb/tests/OLD_tests/tst_psf_headers.py +++ /dev/null @@ -1,40 +0,0 @@ -import os -import glob -import warnings - -import numpy as np -from astropy.io import fits - - -def 
check_all_psf_files_have_correct_header_keywords_in_exts(): - psf_files = glob.glob("C:\Work\irdb\_PSFs\*.fits") - - ext_keys = ["CDELT1", "CDELT2", "CRPIX1", "CRPIX2", "CRVAL1", "CRVAL2", - "CUNIT1", "CUNIT2", "CTYPE1", "CTYPE2", "WAVE0", "PIXELSCL"] - ext_keys_arr = np.array(ext_keys) - - incomplete_pfs_files = {} - - for fname in psf_files: - with fits.open(fname) as hdulist: - for ext, hdu in enumerate(hdulist): - if isinstance(hdu, (fits.ImageHDU, fits.PrimaryHDU)) and \ - hdu.data is not None and "CATTYPE" not in hdu.header: - key_mask = [key in hdu.header for key in ext_keys] - if not all(key_mask): - inv_mask = np.invert(key_mask) - ext_name = "{}[{}]".format(fname, ext) - missing_keys = ext_keys_arr[inv_mask] - msg = "{} is missing keywords: {}" \ - "".format(ext_name, missing_keys) - warnings.warn(msg) - - incomplete_pfs_files[ext_name] = list(missing_keys) - - return incomplete_pfs_files - - -def test_psf_fits_headers(): - if os.environ["USERNAME"] == "Kieran": - psf_dict = check_all_psf_files_have_correct_header_keywords_in_exts() - assert len(psf_dict) == 0 From 7170f61531e95e50ec624d7bcdeeeb54452e60d3 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:04:38 +0100 Subject: [PATCH 13/19] Remove obsolete `packages.yaml` --- irdb/packages.yaml | 64 ---------------------------------------------- 1 file changed, 64 deletions(-) delete mode 100644 irdb/packages.yaml diff --git a/irdb/packages.yaml b/irdb/packages.yaml deleted file mode 100644 index 7169d8b2..00000000 --- a/irdb/packages.yaml +++ /dev/null @@ -1,64 +0,0 @@ -Armazones: - latest: Armazones.2022-04-26 - path: locations - stable: Armazones.2022-04-26 -ELT: - latest: ELT.2022-04-26 - path: telescopes - stable: ELT.2022-04-26 -GTC: - latest: GTC - path: telescopes - stable: GTC -HAWKI: - latest: HAWKI.2023-06-14 - path: instruments - stable: HAWKI.2023-06-14 -HST: - latest: HST - path: telescopes - stable: HST -LFOA: - latest: LFOA.2022-04-12 - path: telescopes - stable: LFOA.2022-04-12 -LaPalma: - latest: LaPalma - path: locations - stable: LaPalma -METIS: - latest: METIS.2022-04-26 - path: instruments - stable: METIS.2022-04-26 -MICADO: - latest: MICADO.2023-07-14.dev - path: instruments - stable: MICADO.2023-03-08 -MICADO_Sci: - latest: MICADO_Sci - path: instruments - stable: MICADO_Sci -MORFEO: - latest: MORFEO.2022-04-26 - path: instruments - stable: MORFEO.2022-04-26 -OSIRIS: - latest: OSIRIS - path: instruments - stable: OSIRIS -Paranal: - latest: Paranal.2022-04-09 - path: locations - stable: Paranal.2022-04-09 -VLT: - latest: VLT - path: telescopes - stable: VLT -WFC3: - latest: WFC3 - path: instruments - stable: WFC3 -test_package: - latest: test_package.2022-07-11.dev - path: instruments - stable: test_package.2022-07-11 From 9ae41d7bb366613675522fa2a5fee9304ed72d88 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:05:17 +0100 Subject: [PATCH 14/19] Add MOSAIC to `server_folders.yaml`, rm MICADO_Sci --- irdb/server_folders.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/irdb/server_folders.yaml b/irdb/server_folders.yaml index 116904cf..5fb0a626 100644 --- a/irdb/server_folders.yaml +++ b/irdb/server_folders.yaml @@ -8,8 +8,8 @@ LaPalma: locations MAORY: instruments METIS: instruments MICADO: instruments -MICADO_Sci: instruments MORFEO: instruments +MOSAIC: instruments OSIRIS: instruments Paranal: locations VLT: telescopes From 9eeb2d15a55975df3bcab5b7d72f2711c2e4a837 Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:06:17 +0100 Subject: 
[PATCH 15/19] Remove obsolete code from `__init__.py` Add minimal docstring instead... --- irdb/__init__.py | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/irdb/__init__.py b/irdb/__init__.py index c42a9b52..c1ed5804 100644 --- a/irdb/__init__.py +++ b/irdb/__init__.py @@ -1,16 +1,2 @@ -from os import path as p -import yaml -from tempfile import TemporaryDirectory - -dname = p.dirname(__file__) -with open(p.join(dname, "packages.yaml")) as f: - PKGS = yaml.full_load(f) - - -# I was thinking this could work as a way of doing an on-the-fly import for -# any of the packages. But this seems not to work in this state. -# def __getattr__(name): -# if name not in PKGS: -# raise ImportError(f"{name} not in irdb.PKGS dictionary") -# -# return PKGS[name] +# -*- coding: utf-8 -*- +"""Internal functionality for CI/CD etc.""" From b05b82cb22bde55d0c6783c886c74946e9e7293e Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:24:57 +0100 Subject: [PATCH 16/19] Add astar-utils to dependencies --- requirements.github_actions.txt | 1 + requirements.readthedocs.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/requirements.github_actions.txt b/requirements.github_actions.txt index 02b9f7f3..9ed7f400 100644 --- a/requirements.github_actions.txt +++ b/requirements.github_actions.txt @@ -4,6 +4,7 @@ matplotlib astropy pyyaml paramiko +astar-utils scopesim scopesim_templates jupytext diff --git a/requirements.readthedocs.txt b/requirements.readthedocs.txt index 3ba1d264..a0d06a02 100644 --- a/requirements.readthedocs.txt +++ b/requirements.readthedocs.txt @@ -6,6 +6,7 @@ astropy docutils pyyaml +astar-utils scopesim sphinx>=4.3.0 From 022d7dd83fbeb90a159c4d8282ab38b7ceda54bb Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:26:38 +0100 Subject: [PATCH 17/19] Finally replace ancient `SystemDict` with `NestedMapping` --- irdb/badges.py | 6 +-- irdb/system_dict.py | 98 --------------------------------------- irdb/tests/test_badges.py | 14 ++++-- 3 files changed, 13 insertions(+), 105 deletions(-) delete mode 100644 irdb/system_dict.py diff --git a/irdb/badges.py b/irdb/badges.py index 641a2e01..3bd66052 100644 --- a/irdb/badges.py +++ b/irdb/badges.py @@ -15,7 +15,7 @@ import yaml -from irdb.system_dict import SystemDict +from astar_utils import NestedMapping # After 3.11, can just import UTC directly from datetime UTC = timezone.utc @@ -138,7 +138,7 @@ def __init__(self, key: str, value: str): super().__init__(key, value) -class BadgeReport(SystemDict): +class BadgeReport(NestedMapping): """Context manager class for collection and generation of report badges. Intended usage is in a pytest fixture with a scope that covers all tests @@ -156,7 +156,7 @@ class BadgeReport(SystemDict): >>> def test_something(self, badges): >>> badges[f"!foo.bar.baz"] = "OK" - Because `BadgeReport` inherits from ``SystemDict``, the use of '!'-type + Because `BadgeReport` inherits from ``NestedMapping``, the use of '!'-type "bang-strings" is supported. 
Additionally, any logging generated within a test can be captured and diff --git a/irdb/system_dict.py b/irdb/system_dict.py deleted file mode 100644 index 5d794ec4..00000000 --- a/irdb/system_dict.py +++ /dev/null @@ -1,98 +0,0 @@ -import warnings - - -class SystemDict(object): - def __init__(self, new_dict=None): - self.dic = {} - if isinstance(new_dict, dict): - self.update(new_dict) - elif isinstance(new_dict, list): - for entry in new_dict: - self.update(entry) - - def update(self, new_dict): - if isinstance(new_dict, dict) \ - and "alias" in new_dict \ - and "properties" in new_dict: - alias = new_dict["alias"] - if alias in self.dic: - self.dic[alias] = recursive_update(self.dic[alias], - new_dict["properties"]) - else: - self.dic[alias] = new_dict["properties"] - else: - self.dic = recursive_update(self.dic, new_dict) - - def __getitem__(self, item): - if isinstance(item, str) and item[0] == "!": - item_chunks = item[1:].split(".") - entry = self.dic - for item in item_chunks: - # This if-statement may cause issues - if item not in entry: - entry[item] = {} - entry = entry[item] - return entry - else: - return self.dic[item] - - def __setitem__(self, key, value): - if isinstance(key, str) and key[0] == "!": - key_chunks = key[1:].split(".") - entry = self.dic - for key in key_chunks[:-1]: - if key not in entry: - entry[key] = {} - entry = entry[key] - entry[key_chunks[-1]] = value - else: - self.dic[key] = value - - def __contains__(self, item): - if isinstance(item, str) and item[0] == "!": - item_chunks = item[1:].split(".") - entry = self.dic - for item in item_chunks: - if not isinstance(entry, dict) or item not in entry: - return False - entry = entry[item] - return True - else: - return item in self.dic - - def __repr__(self): - msg = " contents:" - for key in self.dic.keys(): - val = self.dic[key] - msg += "\n{}: ".format(key) - if isinstance(val, dict): - for subkey in val.keys(): - msg += "\n {}: {}".format(subkey, val[subkey]) - else: - msg += "{}\n".format(val) - return msg - - -def recursive_update(old_dict, new_dict): - if new_dict is not None: - for key in new_dict: - if key in old_dict: - if isinstance(old_dict[key], dict): - if isinstance(new_dict[key], dict): - old_dict[key] = recursive_update(old_dict[key], - new_dict[key]) - else: - warnings.warn("Overwriting dict: {} with non-dict: {}" - "".format(old_dict[key], new_dict[key])) - old_dict[key] = new_dict[key] - else: - if isinstance(new_dict[key], dict): - warnings.warn("Overwriting non-dict: {} with dict: {}" - "".format(old_dict[key], new_dict[key])) - old_dict[key] = new_dict[key] - else: - old_dict[key] = new_dict[key] - - return old_dict - - diff --git a/irdb/tests/test_badges.py b/irdb/tests/test_badges.py index 0b422737..4d134455 100644 --- a/irdb/tests/test_badges.py +++ b/irdb/tests/test_badges.py @@ -10,9 +10,15 @@ import yaml import pytest -from irdb.badges import BadgeReport, Badge, BoolBadge, NumBadge, StrBadge, \ - MsgOnlyBadge -from irdb.system_dict import SystemDict +from irdb.badges import ( + BadgeReport, + Badge, + BoolBadge, + NumBadge, + StrBadge, + MsgOnlyBadge, +) +from astar_utils import NestedMapping @pytest.fixture(name="temp_dir", scope="module") @@ -113,7 +119,7 @@ def test_yaml_content(self, temp_dir): report["!foo.bar"] = "bogus" path = temp_dir / "_REPORTS/test.yaml" with path.open(encoding="utf-8") as file: - dic = SystemDict(yaml.full_load(file)) + dic = NestedMapping(yaml.full_load(file)) assert "!foo.bar" in dic assert dic["!foo.bar"] == "bogus" From 
04c7f6a91e803665fd247f2db055f8234af776ba Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:27:05 +0100 Subject: [PATCH 18/19] Remove long-deprecated functions --- irdb/badges.py | 77 -------------------------------------------------- 1 file changed, 77 deletions(-) diff --git a/irdb/badges.py b/irdb/badges.py index 3bd66052..43971f14 100644 --- a/irdb/badges.py +++ b/irdb/badges.py @@ -270,79 +270,6 @@ def generate_report(self) -> None: make_entries(file, self.dic) -def load_badge_yaml(filename=None): - """ - Gets the badge yaml file - should be called at the beginning of a test file - - Parameters - ---------- - filename : str - Defaults to /_REPORTS/badges.yaml - - Returns - ------- - badges : SystemDict - - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if filename is None: - filename = "badges.yaml" - - badges = SystemDict() - - try: - with Path(PKG_DIR, "_REPORTS", filename).open(encoding="utf-8") as file: - badges.update(yaml.full_load(file)) - except FileNotFoundError: - logging.warning("%s not found, init empty dict", filename) - - return badges - - -def write_badge_yaml(badge_yaml, filename=None): - """ - Writes the badges yaml dict out to file - should be called during teardown - - Parameters - ---------- - badge_yaml : SystemDict - The dictionary of badges. - - filename : str - Defaults to /_REPORTS/badges.yaml - - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if filename is None: - filename = "badges.yaml" - - if isinstance(badge_yaml, SystemDict): - badge_yaml = badge_yaml.dic - - path = Path(PKG_DIR, "_REPORTS", filename) - path.write_text(yaml.dump(badge_yaml), encoding="utf-8") - - -def make_badge_report(badge_filename=None, report_filename=None): - """ - Generates the badges.md file which describes the state of the packages - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if badge_filename is None: - badge_filename = "badges.yaml" - if report_filename is None: - report_filename = "badges.md" - - badge_dict = load_badge_yaml(badge_filename) - - path = Path(PKG_DIR, "_REPORTS", report_filename) - with path.open("w", encoding="utf-8") as file: - make_entries(file, badge_dict.dic) - - def _get_nested_header(key: str, level: int) -> str: if level > 2: return f"* {key}: " @@ -382,7 +309,3 @@ def make_entries(stream: TextIO, entry, level=0) -> None: if level > 1: stream.write("* ") Badge(key, value).write(stream) - - -if __name__ == "__main__": - make_badge_report() From bf50006f104007d2e7b9d3482a9e0ee5491bbbee Mon Sep 17 00:00:00 2001 From: teutoburg Date: Fri, 28 Nov 2025 12:27:27 +0100 Subject: [PATCH 19/19] More docstrings and formatting stuff --- irdb/badges.py | 29 ++++++++++++++++++----------- irdb/utils.py | 17 +++++++++-------- 2 files changed, 27 insertions(+), 19 deletions(-) diff --git a/irdb/badges.py b/irdb/badges.py index 43971f14..0479ef60 100644 --- a/irdb/badges.py +++ b/irdb/badges.py @@ -1,11 +1,8 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -""" -Everything to do with report badges and more! -""" +"""Everything to do with report badges and more.""" import logging -from warnings import warn from pathlib import Path from typing import TextIO from numbers import Number @@ -70,6 +67,7 @@ class Badge(): colour : str The (auto-assigned) colour of the badge. 
""" + pattern = Template("[![](https://img.shields.io/badge/$key-$val-$col)]()") colour = "lightgrey" @@ -89,14 +87,16 @@ def __init__(self, key: str, value): self.value = _fix_badge_str(value) if isinstance(value, str) else value def write(self, stream: TextIO) -> None: - """Write formatted pattern to I/O stream""" + """Write formatted pattern to I/O stream.""" _dict = {"key": self.key, "val": self.value, "col": self.colour} stream.write(self.pattern.substitute(_dict)) class BoolBadge(Badge): """Key-value Badge for bool values, True -> green, False -> red.""" + colour = "red" + def __init__(self, key: str, value: bool): super().__init__(key, value) if self.value: @@ -105,11 +105,13 @@ def __init__(self, key: str, value: bool): class NumBadge(Badge): """Key-value Badge for numerical values, lightblue.""" + colour = "lightblue" class StrBadge(Badge): """Key-value Badge for string values, colour based on special strings.""" + special_strings = { "observation": "blueviolet", "support": "deepskyblue", @@ -131,6 +133,7 @@ def __init__(self, key: str, value: str): class MsgOnlyBadge(StrBadge): """Key-only Badge for string values, colour based on special strings.""" + pattern = Template("[![](https://img.shields.io/badge/$key-$col)]()") def __init__(self, key: str, value: str): @@ -199,12 +202,14 @@ class BadgeReport(NestedMapping): logs : list of logging.LogRecord List of logging.LogRecord objects to be saved to `logs_filename`. """ - def __init__(self, - filename: str = "badges.yaml", - report_filename: str = "badges.md", - logs_filename: str = "badge_report_log.txt", - save_logs: bool = True, - ): + + def __init__( + self, + filename: str = "badges.yaml", + report_filename: str = "badges.md", + logs_filename: str = "badge_report_log.txt", + save_logs: bool = True, + ) -> None: logging.debug("REPORT INIT") base_path = Path(PKG_DIR, "_REPORTS") @@ -221,6 +226,7 @@ def __init__(self, super().__init__() def __enter__(self): + """Context manager setup.""" logging.debug("REPORT ENTER") # try: # # TODO: WHY do we actually load this first? It caused some issues @@ -234,6 +240,7 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_traceback): + """Context manager teardown.""" logging.debug("REPORT EXIT") self.write_yaml() self.generate_report() diff --git a/irdb/utils.py b/irdb/utils.py index a0984d75..29089c4c 100644 --- a/irdb/utils.py +++ b/irdb/utils.py @@ -1,16 +1,15 @@ -import os -from os import path as pth +# -*- coding: utf-8 -*- +"""TBA.""" + from pathlib import Path -import yaml -from irdb.system_dict import SystemDict PKG_DIR = Path(__file__).parent.parent def get_packages(): """ - Returns a dictionary with all packages in the IRDB + Return a dictionary with all packages in the IRDB. Returns ------- @@ -25,15 +24,17 @@ def get_packages(): # to 'slip under the radar' by the tests, and also defeated the # purpose of test_all_packages_have_a_self_named_yaml. # if (pkg_path / f"{pkg_path.name}.yaml").exists(): - if (pkg_path.is_dir() + if ( + pkg_path.is_dir() and not pkg_path.name.startswith((".", "_")) - and not pkg_path.name in specials): + and pkg_path.name not in specials + ): yield pkg_path.name, pkg_path def recursive_filename_search(entry): """ - Search through a yaml dict looking for the keyword "filename" + Search through a yaml dict looking for the keyword "filename". Parameters ----------