diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e801ac29..4a5bd8f7 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -58,6 +58,21 @@ jobs: name: badge-report path: _REPORTS + irdb_test: + name: Run internal tests + runs-on: ubuntu-latest + if: contains(github.event.pull_request.labels.*.name, 'irdb functionality') + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.12 + - name: Install dependencies + run: pip install -r requirements.github_actions.txt + - name: Run Pytest for badges + run: pytest -m "irdb" + tests_devmaster: name: Test against ScopeSim main or PR branch runs-on: ${{ matrix.os }} diff --git a/irdb/badges.py b/irdb/badges.py deleted file mode 100644 index 641a2e01..00000000 --- a/irdb/badges.py +++ /dev/null @@ -1,388 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Everything to do with report badges and more! -""" - -import logging -from warnings import warn -from pathlib import Path -from typing import TextIO -from numbers import Number -from string import Template -from datetime import datetime as dt, timezone -from collections.abc import Mapping - -import yaml - -from irdb.system_dict import SystemDict - -# After 3.11, can just import UTC directly from datetime -UTC = timezone.utc - -PKG_DIR = Path(__file__).parent.parent - - -def _fix_badge_str(badge_str: str) -> str: - """Eliminate any spaces and single dashes in badge string.""" - return badge_str.replace(" ", "_").replace("-", "--") - - -class Badge(): - """Base class for markdown report badges. - - Based on the type and (in case of strings) value of the parameter `value`, - the appropriate subclass is returned, which also deals with the colour of - the badge. These subclasses should *not* be instantiated directly, but - rather this base class should always be used. - - In the case of a string-type `value`, the colour of the badge is based on - a set of special strings, e.g. red for 'error' or green for 'found'. - A complete list of these special strings can be accessed via - ``StrBadge.special_strings``. The default colour for any string value not - listed as a special string is lightgrey. - - By default, all badges appear as "key/label-value" badges with a grey label - on the left side and a coloured value on the right side. For simple - messages, it is also possible to produce a "message-only" badge. This can - simply be done by adding a leading '!' to the (string) `value` parameter. - The message of the badge is then only the `key` parameter, while the colour - of the badge is again decided by the special strings, after the leading '!' - is stripped. - - Any spaces or single dashes present in either `key` or `value` are - automatically replaced by underscores or double dashes, respectively, to - comply with the format requirements for the badges. - - It is possible to manually change the colour of any badge after creation - by setting the desired colour (string) for the `colour` attribute. - - Parameters - ---------- - key : str - Dictionary key, become (left-side) label of the badge. - value : str, bool, int or float - Dictionary key, become (right-side) value of the badge. - Subclass dispatch is decided based on type and value of this parameter. - - Attributes - ---------- - colour : str - The (auto-assigned) colour of the badge. 
- """ - pattern = Template("[![](https://img.shields.io/badge/$key-$val-$col)]()") - colour = "lightgrey" - - def __new__(cls, key: str, value): - if isinstance(value, bool): - return super().__new__(BoolBadge) - if isinstance(value, Number): - return super().__new__(NumBadge) - if isinstance(value, str): - if value.startswith("!"): - return super().__new__(MsgOnlyBadge) - return super().__new__(StrBadge) - raise TypeError(value) - - def __init__(self, key: str, value): - self.key = _fix_badge_str(key) - self.value = _fix_badge_str(value) if isinstance(value, str) else value - - def write(self, stream: TextIO) -> None: - """Write formatted pattern to I/O stream""" - _dict = {"key": self.key, "val": self.value, "col": self.colour} - stream.write(self.pattern.substitute(_dict)) - - -class BoolBadge(Badge): - """Key-value Badge for bool values, True -> green, False -> red.""" - colour = "red" - def __init__(self, key: str, value: bool): - super().__init__(key, value) - if self.value: - self.colour = "green" - - -class NumBadge(Badge): - """Key-value Badge for numerical values, lightblue.""" - colour = "lightblue" - - -class StrBadge(Badge): - """Key-value Badge for string values, colour based on special strings.""" - special_strings = { - "observation": "blueviolet", - "support": "deepskyblue", - "error": "red", - "missing": "red", - "warning": "orange", - "conflict": "orange", - "incomplete": "orange", - "ok": "green", - "found": "green", - "not_found": "red", - "none": "yellowgreen", - } - - def __init__(self, key: str, value: str): - super().__init__(key, value) - self.colour = self.special_strings.get(self.value.lower(), "lightgrey") - - -class MsgOnlyBadge(StrBadge): - """Key-only Badge for string values, colour based on special strings.""" - pattern = Template("[![](https://img.shields.io/badge/$key-$col)]()") - - def __init__(self, key: str, value: str): - value = value.removeprefix("!") - super().__init__(key, value) - - -class BadgeReport(SystemDict): - """Context manager class for collection and generation of report badges. - - Intended usage is in a pytest fixture with a scope that covers all tests - that should be included in that report file: - - >>> import pytest - >>> - >>> @pytest.fixture(name="badges", scope="module") - >>> def fixture_badges(): - >>> with BadgeReport() as report: - >>> yield report - - This fixture can then be used inside the tests like a dictionary: - - >>> def test_something(self, badges): - >>> badges[f"!foo.bar.baz"] = "OK" - - Because `BadgeReport` inherits from ``SystemDict``, the use of '!'-type - "bang-strings" is supported. - - Additionally, any logging generated within a test can be captured and - stored in the report, to be written in a separate log file at teardown: - - >>> import logging - >>> - >>> def test_something_else(self, badges, caplog): - >>> logging.warning("Oh no!") - >>> badges.logs.extend(caplog.records) - - Note the use of ``caplog.records`` to access the ``logging.LogRecord`` - objects rather then the string output, as `BadgeReport` performs very basic - custom formatting. Further note the use of ``logs.extend()``, because - ``caplog.records`` returns a ``list``, to not end up with nested lists. - - The level of logging recorded is controlled by the logging settings in the - test script. `BadgeReport` handles all ``logging.LogRecord`` objects in - the final `.logs` list. - - Parameters - ---------- - filename : str, optional - Name for yaml file, should end in '.yaml. The default is "badges.yaml". 
- report_filename : str, optional - Name for report file, should end in '.md'. The default is "badges.md". - logs_filename : str, optional - Name for log file. The default is "badge_report_log.txt". - save_logs : bool, optional - Whether to output logs. The default is True. - - Attributes - ---------- - yamlpath : Path - Full path for yaml file. - report_path : Path - Full path for report file. - log_path : Path - Full path for log file. - logs : list of logging.LogRecord - List of logging.LogRecord objects to be saved to `logs_filename`. - """ - def __init__(self, - filename: str = "badges.yaml", - report_filename: str = "badges.md", - logs_filename: str = "badge_report_log.txt", - save_logs: bool = True, - ): - logging.debug("REPORT INIT") - base_path = Path(PKG_DIR, "_REPORTS") - - self.filename = filename - self.yamlpath = base_path / self.filename - self.report_name = report_filename - self.report_path = base_path / self.report_name - - self.save_logs = save_logs - self.logs = [] - logs_name = logs_filename or "badge_report_log.txt" - self.log_path = base_path / logs_name - - super().__init__() - - def __enter__(self): - logging.debug("REPORT ENTER") - # try: - # # TODO: WHY do we actually load this first? It caused some issues - # # with 'old' badges that are not cleared. Is there any good - # # reason at all to load the previous yaml file??? - # with self.yamlpath.open(encoding="utf-8") as file: - # self.update(yaml.full_load(file)) - # except FileNotFoundError: - # logging.warning("%s not found, init empty dict", self.yamlpath) - logging.debug("Init emtpy dict.") - return self - - def __exit__(self, exc_type, exc_value, exc_traceback): - logging.debug("REPORT EXIT") - self.write_yaml() - self.generate_report() - if self.save_logs: - self.write_logs() - logging.debug("REPORT DONE") - - def write_logs(self) -> None: - """Dump logs to file (`logs_filename`).""" - with self.log_path.open("w", encoding="utf-8") as file: - for log in self.logs: - file.write(f"{log.levelname}::{log.message}\n") - - def write_yaml(self) -> None: - """Dump dict to yaml file (`filename`).""" - dumpstr = yaml.dump(self.dic, sort_keys=False) - self.yamlpath.write_text(dumpstr, encoding="utf-8") - - def _make_preamble(self) -> str: - preamble = ("# IRDB Packages Report\n\n" - f"**Created on UTC {dt.now(UTC):%Y-%m-%d %H:%M:%S}**\n\n" - "For details on errors and conflicts, see badge report " - "log file in this directory.\n\n") - return preamble - - def generate_report(self) -> None: - """Write markdown badge report to `report_filename`.""" - if not self.report_path.suffix == ".md": - logging.warning(("Expected '.md' suffix for report file name, but " - "found %s. 
Report file might not be readable."), - self.report_path.suffix) - with self.report_path.open("w", encoding="utf-8") as file: - file.write(self._make_preamble()) - make_entries(file, self.dic) - - -def load_badge_yaml(filename=None): - """ - Gets the badge yaml file - should be called at the beginning of a test file - - Parameters - ---------- - filename : str - Defaults to /_REPORTS/badges.yaml - - Returns - ------- - badges : SystemDict - - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if filename is None: - filename = "badges.yaml" - - badges = SystemDict() - - try: - with Path(PKG_DIR, "_REPORTS", filename).open(encoding="utf-8") as file: - badges.update(yaml.full_load(file)) - except FileNotFoundError: - logging.warning("%s not found, init empty dict", filename) - - return badges - - -def write_badge_yaml(badge_yaml, filename=None): - """ - Writes the badges yaml dict out to file - should be called during teardown - - Parameters - ---------- - badge_yaml : SystemDict - The dictionary of badges. - - filename : str - Defaults to /_REPORTS/badges.yaml - - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if filename is None: - filename = "badges.yaml" - - if isinstance(badge_yaml, SystemDict): - badge_yaml = badge_yaml.dic - - path = Path(PKG_DIR, "_REPORTS", filename) - path.write_text(yaml.dump(badge_yaml), encoding="utf-8") - - -def make_badge_report(badge_filename=None, report_filename=None): - """ - Generates the badges.md file which describes the state of the packages - """ - warn(("Using this function directly is deprecated, use BadgeReport " - "context manager instead."), DeprecationWarning, stacklevel=2) - if badge_filename is None: - badge_filename = "badges.yaml" - if report_filename is None: - report_filename = "badges.md" - - badge_dict = load_badge_yaml(badge_filename) - - path = Path(PKG_DIR, "_REPORTS", report_filename) - with path.open("w", encoding="utf-8") as file: - make_entries(file, badge_dict.dic) - - -def _get_nested_header(key: str, level: int) -> str: - if level > 2: - return f"* {key}: " - return f"{'#' * (level + 2)} {key.title() if level else key}" - - -def make_entries(stream: TextIO, entry, level=0) -> None: - """ - Recursively write lines of text from a nested dictionary to text stream. - - Parameters - ---------- - stream : TextIO - I/O stream to write the badges to. - - entry : dict, str, bool, float, int - A level from a nested dictionary - - level : int - How far down the rabbit hole we are w.r.t the nested dictionary - - Returns - ------- - None - """ - if not isinstance(entry, Mapping): - return - - for key, value in entry.items(): - stream.write("\n") - stream.write(" " * (level - 2)) - if isinstance(value, Mapping): - stream.write(_get_nested_header(key, level)) - # recursive - make_entries(stream, value, level=level+1) - else: - if level > 1: - stream.write("* ") - Badge(key, value).write(stream) - - -if __name__ == "__main__": - make_badge_report() diff --git a/irdb/public_html/index.html b/irdb/public_html/index.html deleted file mode 100644 index 9ccab4ba..00000000 --- a/irdb/public_html/index.html +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - -

-SimCADO is currently moving forward
-i.e. we're undertaking a reasonably sized refactor of the code base
-simcado.readthedocs.io
-The old documentation has been moved to readthedocs and is updated automatically when we push to GitHub
-This interuption is temporary.
-We intend to release the newest version around Christmas 2018.
-Thank you for your patience!
-Important information:
-WARNING: MORFEO PSFs are subject to strict conditions. Before you publish anything
-with these PSFs, please contact either the SimCADO team or the MORFEO team directly
diff --git a/irdb/public_html/index.php b/irdb/public_html/index.php deleted file mode 100644 index 3a56eef8..00000000 --- a/irdb/public_html/index.php +++ /dev/null @@ -1,27 +0,0 @@
-EzPHP
-Welcome to your personal web server
- - \ No newline at end of file diff --git a/irdb/public_html/index.rst b/irdb/public_html/index.rst deleted file mode 100644 index 97305335..00000000 --- a/irdb/public_html/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -ScopeSim is moving forward -========================== - -2022-07-01: Broken ScopeSim server FTP address ----------------------------------------------- - -.. warning:: July 2022: The downloadable content server was retired and the data migrated to a new server. - - vX.Y and above have been redirected to a new server URL. - Please either upgrade to the latest version (``pip install --upgrade ``), or follow these `instructions to update the server URL `_ in the config file. - - -Sunsetting SimCADO and SimMETIS ---------------------------------- -To all those still using SimCADO or SimMETIS, we thank you for continuing to use the original versions of our simulator software. -The upgrades to the server infrastructure at the University of Vienna now mean that the functions for updating internal data files will no longer work. -We will not be releasing an upgrade to SimCADO or SimMETIS as these packages have been replaced by the new multipurpose -[ScopeSim environment](https://scopesim.readthedocs.io/en/latest/) diff --git a/irdb/repairs/__init__.py b/irdb/repairs/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/irdb/repairs/repair_scripts.py b/irdb/repairs/repair_scripts.py deleted file mode 100644 index 186c2798..00000000 --- a/irdb/repairs/repair_scripts.py +++ /dev/null @@ -1,33 +0,0 @@ -import glob -from astropy.io import fits - - -def repair_fits_headers(filename): - with fits.open(filename, mode="update") as hdulist: - for hdu in hdulist: - - keys = ["CDELT1", "CDELT2", "CDELT1", "CDELT2", "PIXELSCL", "WAVE0"] - keys_inv = ["CD1_1", "CD2_2", "PIXELSCL", "PIXELSCL", "CDELT1", - "WAVELENG"] - for key, key_inv in zip(keys, keys_inv): - if key not in hdu.header and key_inv in hdu.header: - hdu.header[key] = hdu.header[key_inv] - - if isinstance(hdu, (fits.PrimaryHDU, fits.ImageHDU)) and \ - hdu.data is not None: - missing_keys = ["CRVAL1", "CRVAL2", "CTYPE1", "CTYPE2", - "CRPIX1", "CRPIX2", 'CUNIT1', 'CUNIT2'] - missing_vals = [0, 0, "RA---TAN", "DEC--TAN", - hdu.data.shape[0] / 2., hdu.data.shape[1] / 2., - "arcsec", "arcsec"] - - for key, val in zip(missing_keys, missing_vals): - if key not in hdu.header: - hdu.header[key] = val - - hdulist.flush() - - -psf_files = glob.glob("C:\Work\irdb\_PSFs\*.fits") -for fname in psf_files: - repair_fits_headers(fname) diff --git a/irdb/system_dict.py b/irdb/system_dict.py deleted file mode 100644 index 5d794ec4..00000000 --- a/irdb/system_dict.py +++ /dev/null @@ -1,98 +0,0 @@ -import warnings - - -class SystemDict(object): - def __init__(self, new_dict=None): - self.dic = {} - if isinstance(new_dict, dict): - self.update(new_dict) - elif isinstance(new_dict, list): - for entry in new_dict: - self.update(entry) - - def update(self, new_dict): - if isinstance(new_dict, dict) \ - and "alias" in new_dict \ - and "properties" in new_dict: - alias = new_dict["alias"] - if alias in self.dic: - self.dic[alias] = recursive_update(self.dic[alias], - new_dict["properties"]) - else: - self.dic[alias] = new_dict["properties"] - else: - self.dic = recursive_update(self.dic, new_dict) - - def __getitem__(self, item): - if isinstance(item, str) and item[0] == "!": - item_chunks = item[1:].split(".") - entry = self.dic - for item in item_chunks: - # This if-statement may cause issues - if item not in entry: - entry[item] = {} - 
entry = entry[item] - return entry - else: - return self.dic[item] - - def __setitem__(self, key, value): - if isinstance(key, str) and key[0] == "!": - key_chunks = key[1:].split(".") - entry = self.dic - for key in key_chunks[:-1]: - if key not in entry: - entry[key] = {} - entry = entry[key] - entry[key_chunks[-1]] = value - else: - self.dic[key] = value - - def __contains__(self, item): - if isinstance(item, str) and item[0] == "!": - item_chunks = item[1:].split(".") - entry = self.dic - for item in item_chunks: - if not isinstance(entry, dict) or item not in entry: - return False - entry = entry[item] - return True - else: - return item in self.dic - - def __repr__(self): - msg = " contents:" - for key in self.dic.keys(): - val = self.dic[key] - msg += "\n{}: ".format(key) - if isinstance(val, dict): - for subkey in val.keys(): - msg += "\n {}: {}".format(subkey, val[subkey]) - else: - msg += "{}\n".format(val) - return msg - - -def recursive_update(old_dict, new_dict): - if new_dict is not None: - for key in new_dict: - if key in old_dict: - if isinstance(old_dict[key], dict): - if isinstance(new_dict[key], dict): - old_dict[key] = recursive_update(old_dict[key], - new_dict[key]) - else: - warnings.warn("Overwriting dict: {} with non-dict: {}" - "".format(old_dict[key], new_dict[key])) - old_dict[key] = new_dict[key] - else: - if isinstance(new_dict[key], dict): - warnings.warn("Overwriting non-dict: {} with dict: {}" - "".format(old_dict[key], new_dict[key])) - old_dict[key] = new_dict[key] - else: - old_dict[key] = new_dict[key] - - return old_dict - - diff --git a/irdb/tests/OLD_tests/__init__.py b/irdb/tests/OLD_tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/irdb/tests/OLD_tests/packages.yaml b/irdb/tests/OLD_tests/packages.yaml deleted file mode 100644 index fc49bb3b..00000000 --- a/irdb/tests/OLD_tests/packages.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# Packages to be tested -ELT : - dir : ELT - ignore_files : [] - -METIS : - dir : METIS - ignore_files : [] - -MICADO : - dir : MICADO - ignore_files : [] - -test_package : - dir : test_package - ignore_files : [] diff --git a/irdb/tests/OLD_tests/tst_ascii_readablitly.py b/irdb/tests/OLD_tests/tst_ascii_readablitly.py deleted file mode 100644 index 817ce410..00000000 --- a/irdb/tests/OLD_tests/tst_ascii_readablitly.py +++ /dev/null @@ -1,129 +0,0 @@ -import os -import inspect -import warnings -from glob import glob - -import yaml -from astropy.io import ascii as ioascii -from astropy.table import Table - -# Tests for the ascii files: -# 1. Test astropy.io.ascii can return a table object for each file -# 2. Test that each ASCII header can be converted to a dictionary by pyYAML -# 3. 
Specific tests for each type of ASCII file - -cur_frame = os.path.dirname(inspect.getfile(inspect.currentframe())) -HOME = os.path.abspath(os.path.join(cur_frame, "../")) -REPORTS = os.path.abspath(os.path.join(HOME, "_REPORTS")) - -with open(os.path.join(HOME, "packages.yaml")) as f: - PKGS_DICT = yaml.full_load(f) - - -def get_ascii_files_in_package(pkg_dir): - if not os.path.exists(pkg_dir): - raise ValueError("{} doesn't exist".format(pkg_dir)) - - ascii_tags = [".dat", ".tbl"] - files = [] - for tag in ascii_tags: - files += glob(os.path.join(pkg_dir, "*"+tag)) - - return files - - -def convert_table_comments_to_dict(tbl): - - comments_dict = None - if "comments" in tbl.meta: - try: - comments_dict = yaml.full_load("\n".join(tbl.meta["comments"])) - except: - warnings.warn("Couldn't convert .meta['comments'] to dict") - comments_dict = tbl.meta["comments"] - else: - warnings.warn("No comments in table") - - return comments_dict - - -def write_report(filename, dic, tag): - passing_url = "[![](https://img.shields.io/badge/{}-passing-green.svg)]()" - failing_url = "[![](https://img.shields.io/badge/{}-failing-red.svg)]()" - - with open(os.path.join(REPORTS, filename), "w") as f: - f.write("# REPORT : {} \n\n".format(tag.replace("_", " "))) - - for pkg, files in dic.items(): - f.write("# ``{}`` package\n\n".format(pkg)) - if len(files) > 0: - f.write(failing_url.format(tag) + "\n\n") - f.write("The following files have headers which are not in the " - "YAML format: \n\n") - for file in files: - f.write("- ``{}``\n".format(file)) - else: - f.write(passing_url.format(tag) + "\n\n") - f.write("All ASCII file headers are in the YAML format\n\n") - f.write("\n\n") - - -def test_all_ascii_files_readable_by_astropy_io_ascii(): - - tbl_failed_dict = {} - meta_failed_dict = {} - colname_failed_dict = {} - - for pkg in PKGS_DICT: - - tbl_passed = [] - tbl_failed = [] - - meta_passed = [] - meta_failed = [] - - colname_passed = [] - colname_failed = [] - - test_dir = PKGS_DICT[pkg]["dir"] - test_files = get_ascii_files_in_package(os.path.join(HOME, test_dir)) - - for file in test_files: - tbl = ioascii.read(file, fast_reader=False) - print(file) - if isinstance(tbl, Table): - tbl_passed += [file] - else: - tbl_failed += [file] - - meta = convert_table_comments_to_dict(tbl) - if isinstance(meta, dict): - meta_passed += [file] - else: - meta_failed += [file] - - if tbl.colnames[0] != "col0": - colname_passed += [file] - else: - colname_failed += [file] - - tbl_failed_dict[pkg] = tbl_failed - meta_failed_dict[pkg] = meta_failed - colname_failed_dict[pkg] = colname_failed - - write_report("failed_ascii_table.md", tbl_failed_dict, "ASCII_table_format") - write_report("failed_ascii_meta.md", meta_failed_dict, "ASCII_meta_format") - write_report("failed_ascii_colnames.md", meta_failed_dict, "ASCII_colnames") - - print("Tables failing to be read") - print(tbl_failed_dict) - print("Meta data failing to be read") - print(meta_failed_dict) - print("Table column names failing to be read") - print(colname_failed_dict) - - for pkg in tbl_failed_dict: - assert len(tbl_failed_dict[pkg]) == 0 - - for pkg in meta_failed_dict: - assert len(meta_failed_dict[pkg]) == 0 diff --git a/irdb/tests/OLD_tests/tst_psf_headers.py b/irdb/tests/OLD_tests/tst_psf_headers.py deleted file mode 100644 index c1a86749..00000000 --- a/irdb/tests/OLD_tests/tst_psf_headers.py +++ /dev/null @@ -1,40 +0,0 @@ -import os -import glob -import warnings - -import numpy as np -from astropy.io import fits - - -def 
check_all_psf_files_have_correct_header_keywords_in_exts(): - psf_files = glob.glob("C:\Work\irdb\_PSFs\*.fits") - - ext_keys = ["CDELT1", "CDELT2", "CRPIX1", "CRPIX2", "CRVAL1", "CRVAL2", - "CUNIT1", "CUNIT2", "CTYPE1", "CTYPE2", "WAVE0", "PIXELSCL"] - ext_keys_arr = np.array(ext_keys) - - incomplete_pfs_files = {} - - for fname in psf_files: - with fits.open(fname) as hdulist: - for ext, hdu in enumerate(hdulist): - if isinstance(hdu, (fits.ImageHDU, fits.PrimaryHDU)) and \ - hdu.data is not None and "CATTYPE" not in hdu.header: - key_mask = [key in hdu.header for key in ext_keys] - if not all(key_mask): - inv_mask = np.invert(key_mask) - ext_name = "{}[{}]".format(fname, ext) - missing_keys = ext_keys_arr[inv_mask] - msg = "{} is missing keywords: {}" \ - "".format(ext_name, missing_keys) - warnings.warn(msg) - - incomplete_pfs_files[ext_name] = list(missing_keys) - - return incomplete_pfs_files - - -def test_psf_fits_headers(): - if os.environ["USERNAME"] == "Kieran": - psf_dict = check_all_psf_files_have_correct_header_keywords_in_exts() - assert len(psf_dict) == 0 diff --git a/irdb/tests/test_badges.py b/irdb/tests/test_badges.py deleted file mode 100644 index 0b422737..00000000 --- a/irdb/tests/test_badges.py +++ /dev/null @@ -1,129 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -""" -Tests for irdb.badges -""" - -from io import StringIO -from unittest import mock - -import yaml -import pytest - -from irdb.badges import BadgeReport, Badge, BoolBadge, NumBadge, StrBadge, \ - MsgOnlyBadge -from irdb.system_dict import SystemDict - - -@pytest.fixture(name="temp_dir", scope="module") -def fixture_temp_dir(tmp_path_factory): - tmpdir = tmp_path_factory.mktemp("PKG_DIR") - (tmpdir / "_REPORTS").mkdir() - return tmpdir - - -class TestBadgeSubclasses: - def test_bool(self): - assert isinstance(Badge("bogus", True), BoolBadge) - assert isinstance(Badge("bogus", False), BoolBadge) - - def test_num(self): - assert isinstance(Badge("bogus", 7), NumBadge) - assert isinstance(Badge("bogus", 3.14), NumBadge) - - def test_str(self): - assert isinstance(Badge("bogus", "foo"), StrBadge) - - def test_msgonly(self): - assert isinstance(Badge("bogus", "!foo"), MsgOnlyBadge) - - -class TestColours: - @pytest.mark.parametrize("value, colour", [ - ("observation", "blueviolet"), - ("support", "deepskyblue"), - ("error", "red"), - ("missing", "red"), - ("warning", "orange"), - ("conflict", "orange"), - ("incomplete", "orange"), - ("ok", "green"), - ("found", "green"), - ("not_found", "red"), - ("none", "yellowgreen"), - ]) - def test_special_strings(self, value, colour): - assert Badge("bogus", value).colour == colour - - def test_bool(self): - assert Badge("bogus", True).colour == "green" - assert Badge("bogus", False).colour == "red" - - def test_num(self): - assert Badge("bogus", 7).colour == "lightblue" - - -class TestPattern: - def test_simple(self): - with StringIO() as str_stream: - Badge("bogus", "Error").write(str_stream) - pattern = "[![](https://img.shields.io/badge/bogus-Error-red)]()" - assert pattern in str_stream.getvalue() - - def test_msg_only(self): - with StringIO() as str_stream: - Badge("bogus", "!OK").write(str_stream) - pattern = "[![](https://img.shields.io/badge/bogus-green)]()" - assert pattern in str_stream.getvalue() - - -class TestSpecialChars: - def test_space(self): - badge = Badge("bogus foo", "bar baz") - assert badge.key == "bogus_foo" - assert badge.value == "bar_baz" - - def test_dash(self): - badge = Badge("bogus-foo", "bar-baz") - assert badge.key == 
"bogus--foo" - assert badge.value == "bar--baz" - - -class TestReport: - # TODO: the repeated setup stuff should be a fixture or something I guess - - @pytest.mark.usefixtures("temp_dir") - def test_writes_yaml(self, temp_dir): - with mock.patch("irdb.badges.PKG_DIR", temp_dir): - with BadgeReport("test.yaml", "test.md") as report: - report["!foo.bar"] = "bogus" - assert (temp_dir / "_REPORTS/test.yaml").exists() - - @pytest.mark.usefixtures("temp_dir") - def test_writes_md(self, temp_dir): - with mock.patch("irdb.badges.PKG_DIR", temp_dir): - with BadgeReport("test.yaml", "test.md") as report: - report["!foo.bar"] = "bogus" - assert (temp_dir / "_REPORTS/test.md").exists() - - @pytest.mark.usefixtures("temp_dir") - def test_yaml_content(self, temp_dir): - with mock.patch("irdb.badges.PKG_DIR", temp_dir): - with BadgeReport("test.yaml", "test.md") as report: - report["!foo.bar"] = "bogus" - path = temp_dir / "_REPORTS/test.yaml" - with path.open(encoding="utf-8") as file: - dic = SystemDict(yaml.full_load(file)) - assert "!foo.bar" in dic - assert dic["!foo.bar"] == "bogus" - - @pytest.mark.usefixtures("temp_dir") - def test_md_content(self, temp_dir): - with mock.patch("irdb.badges.PKG_DIR", temp_dir): - with BadgeReport("test.yaml", "test.md") as report: - report["!foo.bar"] = "bogus" - path = temp_dir / "_REPORTS/test.md" - markdown = path.read_text(encoding="utf-8") - assert "## foo" in markdown - badge = "[![](https://img.shields.io/badge/bar-bogus-lightgrey)]()" - assert badge in markdown diff --git a/irdb/tests/test_package_contents.py b/irdb/tests/test_package_contents.py index 5725d41d..b7a65d46 100644 --- a/irdb/tests/test_package_contents.py +++ b/irdb/tests/test_package_contents.py @@ -1,25 +1,20 @@ -#!/usr/bin/env python3 # -*- coding: utf-8 -*- +"""Runs the badge reports tests.""" + import logging from pathlib import Path import pytest import yaml -from scopesim.effects.data_container import DataContainer from astropy.io.ascii import InconsistentTableError +from scopesim.effects.data_container import DataContainer +from astar_utils import BadgeReport + from irdb.utils import get_packages, recursive_filename_search -from irdb.badges import BadgeReport from irdb.fileversions import IRDBFile -# HACK: This is necessary because scopesim has import side effects that mess up -# logging here, specifically capture. Once that's solved, the following -# lines should be removed! -from importlib import reload -logging.shutdown() -reload(logging) - # Note: This module doesn't need to run always, so mark it. pytestmark = pytest.mark.badges @@ -172,8 +167,7 @@ def test_all_dat_files_readable(self, package, pkg_dir, badges, caplog): for fn_dat in fns_dat: fn_loc = fn_dat.relative_to(pkg_dir) try: - # FIXME: DataContainer should be updated to support Path objects... - _ = DataContainer(str(fn_dat)) + _ = DataContainer(fn_dat) except InconsistentTableError as err: logging.error("%s InconsistentTableError %s", str(fn_loc), err) bad_files.append(str(fn_loc)) diff --git a/irdb/tests/test_publish.py b/irdb/tests/test_publish.py index b66eaf29..37423178 100644 --- a/irdb/tests/test_publish.py +++ b/irdb/tests/test_publish.py @@ -65,6 +65,9 @@ # # Put the original values back. # PATH_TEST_PACKAGE_VERSION_YAML.write_bytes(b_yaml_test_package) +# Note: This module doesn't need to run always, so mark it. 
+pytestmark = pytest.mark.irdb + @pytest.fixture(scope="module") def temp_zipfiles(tmp_path_factory): @@ -77,7 +80,6 @@ def temp_zipfiles(tmp_path_factory): return tmpdir -@pytest.mark.usefixtures("temp_zipfiles") class TestGetLocalPath: def test_stable(self, temp_zipfiles): with mock.patch("irdb.publish.ZIPPED_DIR", temp_zipfiles): @@ -145,7 +147,6 @@ def fixture_default_argv(): @pytest.mark.webtest -@pytest.mark.usefixtures("default_argv", "temp_zipfiles") @pytest.mark.parametrize("argv, called, response", [ (["-u"], False, None), (["-u", "-s", "--no-confirm"], False, None), @@ -171,7 +172,6 @@ def test_call_confirm(default_argv, temp_zipfiles, argv, called, response): assert mock_confirm.called == called -@pytest.mark.usefixtures("default_argv") @pytest.mark.parametrize("argv, called, args", [ ([], False, {}), (["-c"], True, {"stable": False, "keep_version": False}), @@ -191,7 +191,6 @@ def test_make_package_called(default_argv, argv, called, args): mock_mkpkg.assert_not_called() -@pytest.mark.usefixtures("default_argv") @pytest.mark.parametrize("argv, called, args", [ ([], False, {}), (["-u"], True, {"stable": False, "no_confirm": False}), @@ -213,7 +212,6 @@ def test_push_to_server_called(default_argv, argv, called, args): mock_phsvr.assert_not_called() -@pytest.mark.usefixtures("default_argv") def test_multiple_packages(default_argv): argv = ["foo_package", "bar_package", "-c", "-u"] with mock.patch("sys.argv", default_argv + argv): @@ -226,7 +224,6 @@ def test_multiple_packages(default_argv): assert "bar_package" in mock_phsvr.call_args[0] -@pytest.mark.usefixtures("default_argv", "caplog") def test_warning_no_action(default_argv, caplog): warnmsg = ("Neither `compile` nor `upload` was set. " "No action will be performed.") diff --git a/irdb/tests/test_utils.py b/irdb/tests/test_utils.py index 3057a735..e39847e0 100644 --- a/irdb/tests/test_utils.py +++ b/irdb/tests/test_utils.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Tests for irdb.utils @@ -13,26 +12,26 @@ from irdb.utils import get_packages +# Note: This module doesn't need to run always, so mark it. 
+pytestmark = pytest.mark.irdb + + @pytest.fixture(name="packages", scope="class") def fixture_packages(): return dict(get_packages()) class TestGetPackages: - @pytest.mark.usefixtures("packages") def test_includes_various_packages(self, packages): wanted = {"Armazones", "ELT", "METIS", "MICADO", "test_package"} assert all(pkg_name in packages.keys() for pkg_name in wanted) - @pytest.mark.usefixtures("packages") def test_doesnt_includes_specials(self, packages): wanted = {"irdb", "docs", "_REPORTS", ".github"} assert all(pkg_name not in packages.keys() for pkg_name in wanted) - @pytest.mark.usefixtures("packages") def test_values_are_path_objects(self, packages): assert isinstance(packages["test_package"], Path) - @pytest.mark.usefixtures("packages") def test_only_includes_dirs(self, packages): assert all(path.is_dir() for path in packages.values()) diff --git a/irdb/utils.py b/irdb/utils.py index a0984d75..3600be02 100644 --- a/irdb/utils.py +++ b/irdb/utils.py @@ -1,9 +1,5 @@ -import os -from os import path as pth from pathlib import Path -import yaml -from irdb.system_dict import SystemDict PKG_DIR = Path(__file__).parent.parent diff --git a/irdb/version.py b/irdb/version.py deleted file mode 100644 index 25bacde1..00000000 --- a/irdb/version.py +++ /dev/null @@ -1,11 +0,0 @@ -version = '0.2.0' -date = '2022-04-09 13:00:00 GMT' -yaml_descriptions = """ -- version : 0.2.0 - date : 2022-04-09 - comment : Random updates - changes : - - (KL) re-wrote the publish workflow to include dates and versioning of - packages - -""" diff --git a/pytest.ini b/pytest.ini index 34d8628e..0947adaf 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,8 +1,9 @@ [pytest] # Prevent recursion into MICADO/docs/example_notebooks/inst_pkgs -addopts = --ignore-glob="*/inst_pkgs/*" -p no:randomly -m "not badges" +addopts = --ignore-glob="*/inst_pkgs/*" -p no:randomly -m "not badges and not irdb" # Badge report needs order (at least for now, should be solved by using astar-utils NestedMapping) markers = webtest: mark a test as using network resources. slow: mark test as slow. badges: tests for the badge report + irdb: tests for IRDB functionality (unrelated to a specific instrument package) diff --git a/requirements.github_actions.txt b/requirements.github_actions.txt index 4f05156c..6d5662ee 100644 --- a/requirements.github_actions.txt +++ b/requirements.github_actions.txt @@ -10,3 +10,4 @@ scopesim_templates jupytext ipykernel nbconvert +astar-utils
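Note on the new `irdb` marker (a hedged sketch, not part of this diff): because `addopts` now deselects both `badges` and `irdb` by default, modules carrying the marker only run when explicitly selected, e.g. via the new CI job's `pytest -m "irdb"`. A minimal opted-in module might look like this; the module and test names are hypothetical.

```python
# Hypothetical example, e.g. irdb/tests/test_something_internal.py.
# Mirrors the `pytestmark = pytest.mark.irdb` lines added in this PR.
import pytest

# Deselected by the default addopts (-m "not badges and not irdb"),
# selected by the CI job's `pytest -m "irdb"`.
pytestmark = pytest.mark.irdb


def test_marker_is_applied():
    assert True
```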
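The badge-report machinery itself (`irdb/badges.py`, `irdb/system_dict.py` and their tests) is removed in favour of the `astar-utils` package, which is added to the CI requirements and imported in `test_package_contents.py`. A sketch of the fixture pattern from the removed docstring, rewritten against the imported class, under the assumption that `astar_utils.BadgeReport` keeps the same context-manager interface:

```python
# Sketch only: assumes astar_utils.BadgeReport behaves like the removed
# irdb.badges.BadgeReport (a context manager collecting badge entries).
import pytest
from astar_utils import BadgeReport


@pytest.fixture(name="badges", scope="module")
def fixture_badges():
    with BadgeReport() as report:
        yield report


def test_something(badges):
    # '!'-type bang-strings address nested keys, as in the old docstring.
    badges["!foo.bar.baz"] = "OK"
```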