ci/test: use bare ipython instead of nbval #324

Merged (17 commits) on Jul 19, 2024
51 changes: 38 additions & 13 deletions .actions/assistant.py
@@ -113,7 +113,7 @@ def get_running_cuda_version() -> str:
return ""


def get_running_torch_version():
def get_running_torch_version() -> str:
"""Extract the version of actual PyTorch for this runtime."""
try:
import torch
@@ -322,7 +322,13 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
# dry run does not execute the notebooks just takes them as they are
cmd.append(f"cp {ipynb_file} {pub_ipynb}")
# copy and add meta config
cmd += [f"cp {meta_file} {pub_meta}", f"cat {pub_meta}", f"git add {pub_meta}"]
cmd += [
f"cp {meta_file} {pub_meta}",
'echo "#====== START OF YAML FILE ======#"',
f"cat {pub_meta}",
'echo "#======= END OF YAML FILE =======#"',
f"git add {pub_meta}",
]
else:
pip_req, pip_args = AssistantCLI._parse_requirements(folder)
cmd += [f"pip install {pip_req} --quiet {pip_args}", "pip list"]
@@ -335,7 +341,13 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
# Export the actual packages used in runtime
cmd.append(f"meta_file=$(python .actions/assistant.py update-env-details {folder})")
# copy and add to version the enriched meta config
cmd += ["echo $meta_file", "cat $meta_file", "git add $meta_file"]
cmd += [
"echo $meta_file",
'echo "#====== START OF YAML FILE ======#"',
"cat $meta_file",
'echo "#======= END OF YAML FILE =======#"',
"git add $meta_file",
]
# if thumb image is linked to the notebook, copy and version it too
if thumb_file:
cmd += [f"cp {thumb_file} {pub_thumb}", f"git add {pub_thumb}"]
@@ -347,7 +359,7 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
fopen.write(os.linesep.join(cmd))

@staticmethod
def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]:
def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST, virtualenv: bool = False) -> Optional[str]:
"""Prepare bash script for running tests of a particular notebook.

Args:
@@ -364,11 +376,12 @@ def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]

# prepare isolated environment with inheriting the global packages
path_venv = os.path.join(folder, "venv")
cmd += [
f"python -m virtualenv --system-site-packages {path_venv}",
f"source {os.path.join(path_venv, 'bin', 'activate')}",
"pip --version",
]
if virtualenv:
cmd += [
f"python -m virtualenv --system-site-packages {path_venv}",
f"source {os.path.join(path_venv, 'bin', 'activate')}",
"pip --version",
]

cmd.append(f"# available: {AssistantCLI.DEVICE_ACCELERATOR}")
if AssistantCLI._valid_accelerator(folder):
@@ -378,21 +391,30 @@ def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]
# Export the actual packages used in runtime
cmd.append(f"meta_file=$(python .actions/assistant.py update-env-details {folder} --base_path .)")
# show created meta config
cmd += ["echo $meta_file", "cat $meta_file"]
cmd.append(f"python -m pytest {ipynb_file} -v --nbval --nbval-cell-timeout=300")
cmd += [
"echo $meta_file",
'echo "#====== START OF YAML FILE ======#"',
"cat $meta_file",
'echo "#======= END OF YAML FILE =======#"',
]
# use standard jupyter's executable via CMD
cmd.append(f"jupyter execute {ipynb_file} --inplace")
else:
pub_ipynb = os.path.join(DIR_NOTEBOOKS, f"{folder}.ipynb")
pub_meta = pub_ipynb.replace(".ipynb", ".yaml")
# copy and add meta config
cmd += [
f"mkdir -p {os.path.dirname(pub_meta)}",
f"cp {meta_file} {pub_meta}",
'echo "#====== START OF YAML FILE ======#"',
f"cat {pub_meta}",
'echo "#======= END OF YAML FILE =======#"',
f"git add {pub_meta}",
]
warn("Invalid notebook's accelerator for this device. So no tests will be run!!!", RuntimeWarning)
# deactivate and clean local environment
cmd += ["deactivate", f"rm -rf {os.path.join(folder, 'venv')}"]
if virtualenv:
cmd += ["deactivate", f"rm -rf {os.path.join(folder, 'venv')}"]
if not output_file:
return os.linesep.join(cmd)
with open(output_file, "w") as fopen:
@@ -707,7 +729,10 @@ def update_env_details(folder: str, base_path: str = DIR_NOTEBOOKS) -> str:

Args:
folder: path to the folder
base_path:
base_path: base path with notebooks

Returns:
path to the updated YAML file

"""
meta = AssistantCLI._load_meta(folder)
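For context, a minimal sketch of how the reworked `bash_test` now assembles the generated test script: the virtualenv wrapping becomes opt-in, and the notebook runs through bare `jupyter execute` instead of `pytest --nbval`. This is an illustration only, not the actual `AssistantCLI` method; the helper name and example arguments below are made up.

```python
import os
from typing import List


def build_test_script(folder: str, ipynb_file: str, virtualenv: bool = False) -> str:
    """Illustrative sketch of the new command assembly (not the real AssistantCLI.bash_test)."""
    cmd: List[str] = ["set -ex"]
    path_venv = os.path.join(folder, "venv")
    if virtualenv:
        # the isolated environment is now created only on request
        cmd += [
            f"python -m virtualenv --system-site-packages {path_venv}",
            f"source {os.path.join(path_venv, 'bin', 'activate')}",
            "pip --version",
        ]
    # run the notebook with plain Jupyter instead of pytest + nbval
    cmd.append(f"jupyter execute {ipynb_file} --inplace")
    if virtualenv:
        # tear down only the environment created above
        cmd += ["deactivate", f"rm -rf {path_venv}"]
    return os.linesep.join(cmd)


print(build_test_script("course_UvA-DL/01-introduction-to-pytorch", "notebook.ipynb"))
```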
13 changes: 8 additions & 5 deletions .azure/ipynb-tests.yml → .azure/ipynb-validate.yml
@@ -19,9 +19,12 @@ jobs:
displayName: "Install dependencies"

- bash: |
head=$(git rev-parse origin/main)
printf "Head: $head\n"
git diff --name-only $head --output=target-diff.txt
git fetch --all # some issues with missing main :/
# head=$(git rev-parse origin/main)
# printf "Head: $head\n" # this shall be commit hash
# git diff --name-only $head --output=target-diff.txt
git diff --name-only origin/main HEAD --output=target-diff.txt
cat target-diff.txt
python .actions/assistant.py group-folders --fpath_gitdiff=target-diff.txt
printf "Changed folders:\n"
cat changed-folders.txt
@@ -35,7 +38,7 @@ jobs:
- bash: echo '$(mtrx.dirs)' | python -m json.tool
displayName: "Show matrix"

- job: nbval
- job: ipython
dependsOn: check_diff
strategy:
matrix: $[ dependencies.check_diff.outputs['mtrx.dirs'] ]
@@ -96,4 +99,4 @@ jobs:
env:
KAGGLE_USERNAME: $(KAGGLE_USERNAME)
KAGGLE_KEY: $(KAGGLE_KEY)
displayName: "PyTest notebook"
displayName: "Execute notebook"
4 changes: 2 additions & 2 deletions README.md
@@ -1,6 +1,6 @@
# PytorchLightning Tutorials

[![CI internal](https://github.com/Lightning-AI/tutorials/actions/workflows/ci_test-acts.yml/badge.svg?event=push)](https://github.com/Lightning-AI/tutorials/actions/workflows/ci_test-acts.yml)
[![CI internal](https://github.com/Lightning-AI/tutorials/actions/workflows/ci_internal.yml/badge.svg?event=push)](https://github.com/Lightning-AI/tutorials/actions/workflows/ci_internal.yml)
[![Build Status](https://dev.azure.com/Lightning-AI/Tutorials/_apis/build/status/Lightning-AI.tutorials%20%5Bpublish%5D?branchName=main)](https://dev.azure.com/Lightning-AI/Tutorials/_build/latest?definitionId=29&branchName=main)
[![codecov](https://codecov.io/gh/Lightning-AI/tutorials/branch/main/graph/badge.svg?token=C6T3XOOR56)](https://codecov.io/gh/Lightning-AI/tutorials)
[![Deploy Docs](https://github.com/Lightning-AI/tutorials/actions/workflows/docs-deploy.yml/badge.svg)](https://github.com/Lightning-AI/tutorials/actions/workflows/docs-deploy.yml)
@@ -91,7 +91,7 @@ On the back side of publishing workflow you can find in principle these three st
# 1) convert script to notebooks
jupytext --set-formats ipynb,py:percent notebook.py

# 2) testing the created notebook
# 2) [OPTIONAL] testing the created notebook
pytest -v notebook.ipynb --nbval

# 3) generating notebooks outputs
1 change: 1 addition & 0 deletions _requirements/default.txt
@@ -2,6 +2,7 @@ setuptools>=68.0.0, <69.1.0
matplotlib>=3.0.0, <3.9.0
ipython[notebook]>=8.0.0, <8.17.0
urllib3 # for ipython
numpy <2.0 # needed for older Torch
torch>=1.8.1, <2.1.0
pytorch-lightning>=1.4, <2.1.0
torchmetrics>=0.7, <1.3
2 changes: 1 addition & 1 deletion _requirements/devel.txt
@@ -2,5 +2,5 @@ virtualenv>=20.10
jupytext>=1.10, <1.15 # converting
pytest>=6.0, <7.0
# testing with own fork with extended cell timeout
https://github.com/Borda/nbval/archive/refs/heads/timeout-limit.zip
# https://github.com/Borda/nbval/archive/refs/heads/timeout-limit.zip
papermill>=2.3.4, <2.5.0 # render
1 change: 1 addition & 0 deletions course_UvA-DL/01-introduction-to-pytorch/.meta.yml
@@ -3,6 +3,7 @@ author: Phillip Lippe
created: 2021-08-27
updated: 2023-03-14
license: CC BY-SA
build: 1
description: |
This tutorial will give a short introduction to PyTorch basics, and get you setup for writing your own neural networks.
This notebook is part of a lecture series on Deep Learning at the University of Amsterdam.
@@ -3,7 +3,7 @@ author: Phillip Lippe
created: 2021-07-12
updated: 2023-03-14
license: CC BY-SA
build: 0
build: 1
tags:
- Image
description: |
Expand All @@ -22,7 +22,7 @@ requirements:
- torchvision
- matplotlib
- tensorboard
- lightning>=2.0.0
- pytorch-lightning>=2.0.0
accelerator:
- CPU
- GPU
@@ -9,9 +9,6 @@
import urllib.request
from urllib.error import HTTPError

# PyTorch Lightning
import lightning as L

# Plotting
import matplotlib
import matplotlib.pyplot as plt
@@ -20,6 +17,9 @@
import matplotlib_inline.backend_inline
import numpy as np

# PyTorch Lightning
import pytorch_lightning as pl

# PyTorch
import torch
import torch.nn as nn
@@ -28,7 +28,7 @@

# Torchvision
import torchvision
from lightning.pytorch.callbacks import Callback, LearningRateMonitor, ModelCheckpoint
from pytorch_lightning.callbacks import Callback, LearningRateMonitor, ModelCheckpoint
from torchvision import transforms
from torchvision.datasets import MNIST

@@ -41,7 +41,7 @@
CHECKPOINT_PATH = os.environ.get("PATH_CHECKPOINT", "saved_models/tutorial8")

# Setting the seed
L.seed_everything(42)
pl.seed_everything(42)

# Ensure that all operations are deterministic on GPU (if used) for reproducibility
torch.backends.cudnn.deterministic = True
@@ -465,7 +465,7 @@ def generate_samples(model, inp_imgs, steps=60, step_size=10, return_img_per_ste


# %%
class DeepEnergyModel(L.LightningModule):
class DeepEnergyModel(pl.LightningModule):
def __init__(self, img_shape, batch_size, alpha=0.1, lr=1e-4, beta1=0.0, **CNN_args):
super().__init__()
self.save_hyperparameters()
@@ -640,7 +640,7 @@ def on_epoch_end(self, trainer, pl_module):
# %%
def train_model(**kwargs):
# Create a PyTorch Lightning trainer with the generation callback
trainer = L.Trainer(
trainer = pl.Trainer(
default_root_dir=os.path.join(CHECKPOINT_PATH, "MNIST"),
accelerator="auto",
devices=1,
Expand All @@ -660,7 +660,7 @@ def train_model(**kwargs):
print("Found pretrained model, loading...")
model = DeepEnergyModel.load_from_checkpoint(pretrained_filename)
else:
L.seed_everything(42)
pl.seed_everything(42)
model = DeepEnergyModel(**kwargs)
trainer.fit(model, train_loader, test_loader)
model = DeepEnergyModel.load_from_checkpoint(trainer.checkpoint_callback.best_model_path)
@@ -709,7 +709,7 @@ def train_model(**kwargs):

# %%
model.to(device)
L.seed_everything(43)
pl.seed_everything(43)
callback = GenerateCallback(batch_size=4, vis_steps=8, num_steps=256)
imgs_per_step = callback.generate_imgs(model)
imgs_per_step = imgs_per_step.cpu()
@@ -770,7 +770,7 @@ def train_model(**kwargs):
rand_imgs = torch.rand((128,) + model.hparams.img_shape).to(model.device)
rand_imgs = rand_imgs * 2 - 1.0
rand_out = model.cnn(rand_imgs).mean()
print(f"Average score for random images: {rand_out.item():4.2f}")
print(f"Average score for random images: {rand_out.item()}")

# %% [markdown]
# As we hoped, the model assigns very low probability to those noisy images.
@@ -803,8 +803,8 @@ def compare_images(img1, img2):
plt.xticks([(img1.shape[2] + 2) * (0.5 + j) for j in range(2)], labels=["Original image", "Transformed image"])
plt.yticks([])
plt.show()
print(f"Score original image: {score1:4.2f}")
print(f"Score transformed image: {score2:4.2f}")
print(f"Score original image: {score1}")
print(f"Score transformed image: {score2}")


# %% [markdown]
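The notebook above moves from the unified `lightning` namespace to the standalone `pytorch_lightning` package, so only the alias and callback imports change while the calls stay the same. A minimal sanity check under that assumption (requires `pytorch_lightning>=2.0` to be installed):

```python
import pytorch_lightning as pl
from pytorch_lightning.callbacks import Callback, ModelCheckpoint

# the renamed entry points mirror the previous `lightning as L` alias
pl.seed_everything(42)
trainer = pl.Trainer(accelerator="auto", devices=1, max_epochs=1)
print(pl.__version__, isinstance(ModelCheckpoint(), Callback))
```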
1 change: 0 additions & 1 deletion pyproject.toml
@@ -85,7 +85,6 @@ ignore = [
# TODO: we shall format all long comments as it comes from text cells
"E501", # Line too long
]
ignore-init-module-imports = true

[tool.ruff.lint.per-file-ignores]
"setup.py" = ["D100", "SIM115"]