Merge pull request #25 from SyneRBI/metrics-and-docs
metrics and docs
casperdcl authored Jun 28, 2024
2 parents 9d8e988 + ac6401c commit 5483ba6
Showing 4 changed files with 57 additions and 29 deletions.
1 change: 1 addition & 0 deletions .github/workflows/run.yml
@@ -16,6 +16,7 @@ jobs:
shell: bash -el {0}
run: |
source /opt/SIRF-SuperBuild/INSTALL/bin/env_sirf.sh
pip install git+https://github.com/TomographicImaging/Hackathon-000-Stochastic-QualityMetrics
if test -f apt.txt; then
sudo apt-get update
xargs -a apt.txt sudo apt-get install -y
30 changes: 18 additions & 12 deletions README.md
@@ -28,24 +28,30 @@ You can also find some example notebooks here which should help you with your de

## Organiser setup

The organisers will effectively execute:
The organisers will execute (after downloading https://petric.tomography.stfc.ac.uk/data/ to `/path/to/data`):

```sh
docker run --rm -it -v data:/mnt/share/petric:ro ghcr.io/synerbi/sirf:edge-gpu
# or ideally ghcr.io/synerbi/sirf:latest-gpu after the next SIRF release!
python
docker run --rm -it -v /path/to/data:/mnt/share/petric:ro -v .:/workdir -w /workdir synerbi/sirf:edge-gpu /bin/bash
# ... or ideally synerbi/sirf:latest-gpu after the next SIRF release!
pip install git+https://github.com/TomographicImaging/Hackathon-000-Stochastic-QualityMetrics
# ... conda/pip/apt install environment.yml/requirements.txt/apt.txt
python petric.py
```

```python
from main import Submission, submission_callbacks # your submission
from petric import data, metrics # our data & evaluation
assert issubclass(Submission, cil.optimisation.algorithms.Algorithm)
with Timeout(minutes=5):
Submission(data).run(np.inf, callbacks=metrics + submission_callbacks)
```
> [!TIP]
> `petric.py` will effectively execute:
>
> ```python
> from main import Submission, submission_callbacks # your submission
> from petric import data, metrics # our data & evaluation
> assert issubclass(Submission, cil.optimisation.algorithms.Algorithm)
> Submission(data).run(numpy.inf, callbacks=metrics + submission_callbacks)
> ```
<!-- br -->
> [!WARNING]
> To avoid timing out, please disable any debugging/plotting code before submitting!
> To avoid timing out (5 min runtime), please disable any debugging/plotting code before submitting!
> This includes removing any progress/logging from `submission_callbacks`.
- `metrics` are described in the [wiki](https://github.com/SyneRBI/PETRIC/wiki), but are not yet part of this repository
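
For orientation, a minimal `main.py` satisfying the interface in the tip above might look like the sketch below. This is a hedged skeleton, not part of this commit: the `OSEM_image` field, the `update_objective_interval` default and the empty `submission_callbacks` list are illustrative assumptions (see `main_SGD.py` in this repository for a working example).

```python
# main.py -- a minimal sketch of the interface petric.py expects; NOT a useful algorithm.
from cil.optimisation.algorithms import Algorithm


class Submission(Algorithm):
    """Skeleton only: replace `update()` with a real reconstruction step."""
    def __init__(self, data, update_objective_interval: int = 10, **kwargs):
        self.dataset = data
        self.x = data.OSEM_image.clone()  # assumed Dataset field, used as initial image
        super().__init__(update_objective_interval=update_objective_interval, **kwargs)
        self.configured = True  # flag required by CIL's Algorithm base class

    def update(self):
        pass  # placeholder: no-op "reconstruction" step

    def update_objective(self):
        self.loss.append(0.0)  # placeholder objective value


# keep empty (or nearly so) to avoid timing out -- see the warning above
submission_callbacks = []
```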
1 change: 1 addition & 0 deletions main_SGD.py
@@ -35,6 +35,7 @@ def __init__(self, data: Dataset, num_subsets: int = 7, step_size: float = 1e-10
"""
Initialisation function, setting up data & (hyper)parameters.
NB: in practice, `num_subsets` should likely be determined from the data.
WARNING: we also currently ignore the non-negativity constraint here.
This is just an example. Try to modify and improve it!
"""
data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term,
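
The docstring above notes that `num_subsets` should ideally be derived from the data rather than hard-coded. Below is a hedged sketch of one way to do that; the `dimensions()` call and the assumption that views are the third axis of the `AcquisitionData` are not confirmed by this commit and would need checking against SIRF.

```python
# A sketch (not from this commit): pick `num_subsets` from the acquired data.
def choose_num_subsets(acquired_data, max_subsets: int = 30) -> int:
    # assumed axis order (TOF, sinograms, views, tangential) -- verify with SIRF
    num_views = acquired_data.dimensions()[2]
    # largest divisor of the number of views not exceeding `max_subsets`
    divisors = [n for n in range(1, max_subsets + 1) if num_views % n == 0]
    return max(divisors)
```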
54 changes: 37 additions & 17 deletions petric.py
@@ -13,12 +13,14 @@
from pathlib import Path
from time import time

from numpy import clip, inf, loadtxt
import numpy as np
from skimage.metrics import mean_squared_error, peak_signal_noise_ratio
from tensorboardX import SummaryWriter

import sirf.STIR as STIR
from cil.optimisation.algorithms import Algorithm
from cil.optimisation.utilities import callbacks as cbks
from img_quality_cil_stir import ImageQualityCallback

TEAM = os.getenv("GITHUB_REPOSITORY", "SyneRBI/PETRIC-").split("/PETRIC-", 1)[-1]
VERSION = os.getenv("GITHUB_REF_NAME", "")
@@ -44,13 +46,14 @@ def __call__(self, algo: Algorithm):


class TensorBoard(cbks.Callback):
"""Log image slices & objective value"""
def __init__(self, verbose=1, transverse_slice=None, coronal_slice=None, vmax=None, logdir=OUTDIR):
super().__init__(verbose)
self.transverse_slice = transverse_slice
self.coronal_slice = coronal_slice
self.vmax = vmax
self.x_prev = None
self.tb = SummaryWriter(logdir=str(logdir))
self.tb = logdir if isinstance(logdir, SummaryWriter) else SummaryWriter(logdir=str(logdir))

def __call__(self, algo: Algorithm):
if algo.iteration % algo.update_objective_interval != 0 and algo.iteration != algo.max_iteration:
@@ -66,20 +69,32 @@ def __call__(self, algo: Algorithm):
self.tb.add_scalar("normalised_change", normalised_change, algo.iteration)
self.x_prev = algo.x.clone()
self.tb.add_image("transverse",
clip(algo.x.as_array()[self.transverse_slice:self.transverse_slice + 1] / self.vmax, 0, 1),
np.clip(algo.x.as_array()[self.transverse_slice:self.transverse_slice + 1] / self.vmax, 0, 1),
algo.iteration)
self.tb.add_image("coronal", clip(algo.x.as_array()[None, :, self.coronal_slice] / self.vmax, 0, 1),
self.tb.add_image("coronal", np.clip(algo.x.as_array()[None, :, self.coronal_slice] / self.vmax, 0, 1),
algo.iteration)


class MetricsWithTimeout(cbks.Callback):
"""Stops the algorithm after `seconds`"""
def __init__(self, seconds=300, outdir=OUTDIR, transverse_slice=None, coronal_slice=None, verbose=1):
def __init__(self, seconds=300, outdir=OUTDIR, transverse_slice=None, coronal_slice=None, reference_image=None,
verbose=1):
super().__init__(verbose)
self.callbacks = [
cbks.ProgressCallback(),
SaveIters(outdir=outdir),
TensorBoard(logdir=outdir, transverse_slice=transverse_slice, coronal_slice=coronal_slice)]
(tb_cbk := TensorBoard(logdir=outdir, transverse_slice=transverse_slice, coronal_slice=coronal_slice))]

if reference_image:
roi_image_dict = {f'S{i}': STIR.ImageData(f'S{i}.hv') for i in range(1, 8)}
# NB: these metrics are for testing only.
# The final evaluation will use metrics described in https://github.com/SyneRBI/PETRIC/wiki
self.callbacks.append(
ImageQualityCallback(
reference_image, tb_cbk.tb, roi_mask_dict=roi_image_dict, metrics_dict={
'MSE': mean_squared_error, 'MAE': self.mean_absolute_error, 'PSNR': peak_signal_noise_ratio},
statistics_dict={'MEAN': np.mean, 'STDDEV': np.std, 'MAX': np.max}))

self.limit = time() + seconds

def __call__(self, algorithm: Algorithm):
@@ -90,6 +105,10 @@ def __call__(self, algorithm: Algorithm):
c(algorithm)
self.limit += time() - now

@staticmethod
def mean_absolute_error(y, x):
return np.mean(np.abs(y - x))


def construct_RDP(penalty_strength, initial_image, kappa, max_scaling=1e-3):
"""
@@ -125,7 +144,7 @@ def get_data(srcdir=".", outdir=OUTDIR, sirf_verbosity=0):
OSEM_image = STIR.ImageData(str(srcdir / 'OSEM_image.hv'))
kappa = STIR.ImageData(str(srcdir / 'kappa.hv'))
if (penalty_strength_file := (srcdir / 'penalisation_factor.txt')).is_file():
penalty_strength = float(loadtxt(penalty_strength_file))
penalty_strength = float(np.loadtxt(penalty_strength_file))
else:
penalty_strength = 1 / 700 # default choice
prior = construct_RDP(penalty_strength, OSEM_image, kappa)
@@ -134,20 +153,21 @@ def get_data(srcdir=".", outdir=OUTDIR, sirf_verbosity=0):


if SRCDIR.is_dir():
metrics_data_pairs = [([MetricsWithTimeout(outdir=OUTDIR / "mMR_NEMA", transverse_slice=72, coronal_slice=109)],
get_data(srcdir=SRCDIR / "Siemens_mMR_NEMA_IQ", outdir=OUTDIR / "mMR_NEMA")),
([MetricsWithTimeout(outdir=OUTDIR / "NeuroLF_Hoffman", transverse_slice=72)],
get_data(srcdir=SRCDIR / "NeuroLF_Hoffman_Dataset", outdir=OUTDIR / "NeuroLF_Hoffman")),
([MetricsWithTimeout(outdir=OUTDIR / "Vision600_thorax")],
get_data(srcdir=SRCDIR / "Siemens_Vision600_thorax", outdir=OUTDIR / "Vision600_thorax"))]
data_metrics_pairs = [
(get_data(srcdir=SRCDIR / "Siemens_mMR_NEMA_IQ", outdir=OUTDIR / "mMR_NEMA"),
[MetricsWithTimeout(outdir=OUTDIR / "mMR_NEMA", transverse_slice=72, coronal_slice=109)]),
(get_data(srcdir=SRCDIR / "NeuroLF_Hoffman_Dataset", outdir=OUTDIR / "NeuroLF_Hoffman"),
[MetricsWithTimeout(outdir=OUTDIR / "NeuroLF_Hoffman", transverse_slice=72)]),
(get_data(srcdir=SRCDIR / "Siemens_Vision600_thorax",
outdir=OUTDIR / "Vision600_thorax"), [MetricsWithTimeout(outdir=OUTDIR / "Vision600_thorax")])]
else:
metrics_data_pairs = [([], None)]
data_metrics_pairs = [(None, [])]
# first dataset
metrics, data = metrics_data_pairs[0]
data, metrics = data_metrics_pairs[0]

if __name__ == "__main__":
from main import Submission, submission_callbacks
assert issubclass(Submission, Algorithm)
for metrics, data in metrics_data_pairs:
for data, metrics in data_metrics_pairs:
algo = Submission(data)
algo.run(inf, callbacks=metrics + submission_callbacks)
algo.run(np.inf, callbacks=metrics + submission_callbacks)
