@@ -113,7 +113,7 @@ def get_running_cuda_version() -> str:
        return ""


-def get_running_torch_version():
+def get_running_torch_version() -> str:
    """Extract the version of actual PyTorch for this runtime."""
    try:
        import torch
@@ -154,7 +154,7 @@ class AssistantCLI:
        "lightning_examples": "Lightning-Examples",
        "flash_tutorials": "Kaggle",
    }
-    _BASH_SCRIPT_BASE = ("#!/bin/bash", "set -e", "")
+    _BASH_SCRIPT_BASE = ("#!/bin/bash", "set -ex", "")
    _EXT_ARCHIVE_ZIP = (".zip",)
    _EXT_ARCHIVE_TAR = (".tar", ".gz")
    _EXT_ARCHIVE = _EXT_ARCHIVE_ZIP + _EXT_ARCHIVE_TAR
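A quick aside (not part of the diff): switching `set -e` to `set -ex` in `_BASH_SCRIPT_BASE` affects every generated script, since they are all built from this tuple; `-e` aborts on the first failing command and `-x` echoes each command before running it. A minimal sketch of the resulting script header, assuming `AssistantCLI` is importable from `.actions/assistant.py` (the module name below is an assumption):

import os

from assistant import AssistantCLI  # import path assumed; the class lives in .actions/assistant.py

# Every generated bash script starts from _BASH_SCRIPT_BASE, so all of them now
# run with "set -ex": trace each command (-x) and stop on the first failure (-e).
cmd = list(AssistantCLI._BASH_SCRIPT_BASE) + ["# Rendering: some-folder"]  # folder name is illustrative
print(os.linesep.join(cmd))
# #!/bin/bash
# set -ex
#
# # Rendering: some-folder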
@@ -167,6 +167,7 @@ def _find_meta(folder: str) -> str:

        Args:
            folder: path to the folder with python script, meta and artefacts
+
        """
        files = glob.glob(os.path.join(folder, AssistantCLI._META_FILE_REGEX), flags=glob.BRACE)
        if len(files) == 1:
@@ -180,6 +181,7 @@ def _load_meta(folder: str, strict: bool = False) -> Optional[dict]:
        Args:
            folder: path to the folder with python script, meta and artefacts
            strict: raise error if meta is missing required fields
+
        """
        fpath = AssistantCLI._find_meta(folder)
        assert fpath, f"Missing meta file in folder: {folder}"
@@ -197,6 +199,7 @@ def _valid_conf_folder(folder: str) -> Tuple[str, str]:

        Args:
            folder: path to the folder with python script, meta and artefacts
+
        """
        meta_files = [os.path.join(folder, f".meta.{ext}") for ext in ("yml", "yaml")]
        meta_files = [pf for pf in meta_files if os.path.isfile(pf)]
@@ -217,6 +220,7 @@ def _valid_folder(folder: str, ext: str) -> Tuple[str, str, str]:
        Args:
            folder: path to the folder with python script, meta and artefacts
            ext: extension determining the stage - ".py" for python script and ".ipynb" for notebook
+
        """
        files = glob.glob(os.path.join(folder, f"*{ext}"))
        if len(files) != 1:
@@ -231,6 +235,7 @@ def _valid_accelerator(folder: str) -> bool:

        Args:
            folder: path to the folder with python script, meta and artefacts
+
        """
        meta = AssistantCLI._load_meta(folder)
        meta_accels = [acc.lower() for acc in meta.get("accelerator", AssistantCLI._META_ACCEL_DEFAULT)]
@@ -243,6 +248,7 @@ def _parse_requirements(folder: str) -> Tuple[str, str]:

        Args:
            folder: path to the folder with python script, meta and artefacts
+
        """
        meta = AssistantCLI._load_meta(folder)
        reqs = meta.get("requirements", [])
@@ -268,6 +274,7 @@ def _bash_download_data(folder: str) -> List[str]:

        Args:
            folder: path to the folder with python script, meta and artefacts
+
        """
        meta = AssistantCLI._load_meta(folder)
        datasets = meta.get("datasets", {})
@@ -299,6 +306,7 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[

        Returns:
            string with bash script content
+
        """
        cmd = list(AssistantCLI._BASH_SCRIPT_BASE) + [f"# Rendering: {folder}"]
        if not AssistantCLI.DRY_RUN:
@@ -314,7 +322,13 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
            # dry run does not execute the notebooks just takes them as they are
            cmd.append(f"cp {ipynb_file} {pub_ipynb}")
            # copy and add meta config
-            cmd += [f"cp {meta_file} {pub_meta}", f"cat {pub_meta}", f"git add {pub_meta}"]
+            cmd += [
+                f"cp {meta_file} {pub_meta}",
+                'echo "#====== START OF YAML FILE ======#"',
+                f"cat {pub_meta}",
+                'echo "#======= END OF YAML FILE =======#"',
+                f"git add {pub_meta}",
+            ]
        else:
            pip_req, pip_args = AssistantCLI._parse_requirements(folder)
            cmd += [f"pip install {pip_req} --quiet {pip_args}", "pip list"]
@@ -327,7 +341,13 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
            # Export the actual packages used in runtime
            cmd.append(f"meta_file=$(python .actions/assistant.py update-env-details {folder})")
            # copy and add to version the enriched meta config
-            cmd += ["echo $meta_file", "cat $meta_file", "git add $meta_file"]
+            cmd += [
+                "echo $meta_file",
+                'echo "#====== START OF YAML FILE ======#"',
+                "cat $meta_file",
+                'echo "#======= END OF YAML FILE =======#"',
+                "git add $meta_file",
+            ]
        # if thumb image is linked to the notebook, copy and version it too
        if thumb_file:
            cmd += [f"cp {thumb_file} {pub_thumb}", f"git add {pub_thumb}"]
@@ -339,7 +359,7 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
            fopen.write(os.linesep.join(cmd))

    @staticmethod
-    def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]:
+    def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST, virtualenv: bool = False) -> Optional[str]:
        """Prepare bash script for running tests of a particular notebook.

        Args:
@@ -348,18 +368,20 @@ def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]

        Returns:
            string with bash script content
+
        """
        cmd = list(AssistantCLI._BASH_SCRIPT_BASE) + [f"# Testing: {folder}"]
        cmd += AssistantCLI._bash_download_data(folder)
        ipynb_file, meta_file, _ = AssistantCLI._valid_folder(folder, ext=".ipynb")

        # prepare isolated environment with inheriting the global packages
        path_venv = os.path.join(folder, "venv")
-        cmd += [
-            f"python -m virtualenv --system-site-packages {path_venv}",
-            f"source {os.path.join(path_venv, 'bin', 'activate')}",
-            "pip --version",
-        ]
+        if virtualenv:
+            cmd += [
+                f"python -m virtualenv --system-site-packages {path_venv}",
+                f"source {os.path.join(path_venv, 'bin', 'activate')}",
+                "pip --version",
+            ]

        cmd.append(f"# available: {AssistantCLI.DEVICE_ACCELERATOR}")
        if AssistantCLI._valid_accelerator(folder):
@@ -369,21 +391,30 @@ def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST) -> Optional[str]
            # Export the actual packages used in runtime
            cmd.append(f"meta_file=$(python .actions/assistant.py update-env-details {folder} --base_path .)")
            # show created meta config
-            cmd += ["echo $meta_file", "cat $meta_file"]
-            cmd.append(f"python -m pytest {ipynb_file} -v --nbval --nbval-cell-timeout=300")
+            cmd += [
+                "echo $meta_file",
+                'echo "#====== START OF YAML FILE ======#"',
+                "cat $meta_file",
+                'echo "#======= END OF YAML FILE =======#"',
+            ]
+            # use standard jupyter's executable via CMD
+            cmd.append(f"jupyter execute {ipynb_file} --inplace")
        else:
            pub_ipynb = os.path.join(DIR_NOTEBOOKS, f"{folder}.ipynb")
            pub_meta = pub_ipynb.replace(".ipynb", ".yaml")
            # copy and add meta config
            cmd += [
                f"mkdir -p {os.path.dirname(pub_meta)}",
                f"cp {meta_file} {pub_meta}",
+                'echo "#====== START OF YAML FILE ======#"',
                f"cat {pub_meta}",
+                'echo "#======= END OF YAML FILE =======#"',
                f"git add {pub_meta}",
            ]
            warn("Invalid notebook's accelerator for this device. So no tests will be run!!!", RuntimeWarning)
        # deactivate and clean local environment
-        cmd += ["deactivate", f"rm -rf {os.path.join(folder, 'venv')}"]
+        if virtualenv:
+            cmd += ["deactivate", f"rm -rf {os.path.join(folder, 'venv')}"]
        if not output_file:
            return os.linesep.join(cmd)
        with open(output_file, "w") as fopen:
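For orientation (not part of the diff): with the changes above, `bash_test` now runs the notebook through `jupyter execute ... --inplace` instead of `pytest --nbval`, and the per-notebook virtualenv is created and cleaned up only when the new `virtualenv` flag is set. A minimal sketch of previewing the generated script, assuming an existing notebook folder with its meta file (the folder name and import path are assumptions):

from assistant import AssistantCLI  # import path assumed; the class lives in .actions/assistant.py

# An empty output_file makes bash_test() return the script as a string instead
# of writing it to PATH_SCRIPT_TEST (see "if not output_file: ..." above).
script = AssistantCLI.bash_test(
    folder="lightning_examples/hypothetical-notebook",  # hypothetical folder with *.ipynb and .meta.yml
    output_file="",
    virtualenv=True,  # also emit the virtualenv create/activate and teardown commands
)
print(script)  # begins with "#!/bin/bash" and "set -ex"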
@@ -395,6 +426,7 @@ def convert_ipynb(folder: str) -> None:

        Args:
            folder: folder with python script
+
        """
        fpath, _, _ = AssistantCLI._valid_folder(folder, ext=".py")
        with open(fpath) as fopen:
@@ -426,6 +458,7 @@ def _replace_images(lines: list, local_dir: str) -> list:
        Args:
            lines: string lines from python script
            local_dir: relative path to the folder with script
+
        """
        md = os.linesep.join([ln.rstrip() for ln in lines])
        p_imgs = []
@@ -488,6 +521,7 @@ def group_folders(
        Example:
            $ python assistant.py group-folders ../target-diff.txt \
                --fpath_actual_dirs "['../dirs-main.txt', '../dirs-publication.txt']"
+
        """
        with open(fpath_gitdiff) as fopen:
            changed = [ln.strip() for ln in fopen.readlines()]
@@ -534,6 +568,7 @@ def generate_matrix(fpath_change_folders: str, json_indent: Optional[int] = None
        Args:
            fpath_change_folders: output of previous ``group_folders``
            json_indent: makes the json more readable, recommendation is 4
+
        """
        with open(fpath_change_folders) as fopen:
            folders = [ln.strip() for ln in fopen.readlines()]
@@ -623,6 +658,7 @@ def copy_notebooks(
            path_docs_images: destination path to the images' location relative to ``docs_root``
            patterns: patterns to use when glob-ing notebooks
            ignore: ignore some specific notebooks even when the given string is in path
+
        """
        all_ipynb = []
        for pattern in patterns:
@@ -693,7 +729,11 @@ def update_env_details(folder: str, base_path: str = DIR_NOTEBOOKS) -> str:

        Args:
            folder: path to the folder
-            base_path:
+            base_path: base path with notebooks
+
+        Returns:
+            path to the updated YAML file
+
        """
        meta = AssistantCLI._load_meta(folder)
        # default is CPU runtime
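For context (not part of the diff): the generated scripts above call this command as `meta_file=$(python .actions/assistant.py update-env-details <folder> ...)`, and the path it prints is what later gets echoed and `git add`-ed. A minimal sketch of the same call made directly, with a hypothetical folder name:

import subprocess

# Run the same CLI command used inside the generated bash scripts; it prints
# the path of the meta YAML enriched with the actual runtime environment.
out = subprocess.run(
    [
        "python",
        ".actions/assistant.py",
        "update-env-details",
        "lightning_examples/hypothetical-notebook",  # hypothetical folder
        "--base_path",
        ".",
    ],
    capture_output=True,
    text=True,
    check=True,
)
print(out.stdout.strip())  # path to the updated YAML file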