Commit 6105c81

use Black (#81)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 25341f2 commit 6105c81
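The change is mechanical: the repository's formatter switches from yapf to Black, so the Python diffs below are almost entirely quote normalization (single to double quotes), slice spacing, and trailing commas, with the pre-commit and CI configs updated to match. As a hedged illustration of the transformation (assuming `black` is installed; line_length=120 is an assumption, inferred from reformatted lines that exceed Black's default of 88):

    import black  # pip install black

    # One of the lines this commit touches, run through Black's API directly.
    src = "DEVICE_ACCELERATOR = os.environ.get(ENV_DEVICE, 'cpu').lower()\n"
    out = black.format_str(src, mode=black.Mode(line_length=120))
    print(out)  # DEVICE_ACCELERATOR = os.environ.get(ENV_DEVICE, "cpu").lower()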

File tree: 22 files changed (+535, -658 lines)


.actions/helpers.py (+36, -37)
@@ -24,7 +24,7 @@
 PUBLIC_BRANCH = "publication"
 URL_DOWNLOAD = f"https://github.com/PyTorchLightning/{REPO_NAME}/raw/{DEFAULT_BRANCH}"
 ENV_DEVICE = "ACCELERATOR"
-DEVICE_ACCELERATOR = os.environ.get(ENV_DEVICE, 'cpu').lower()
+DEVICE_ACCELERATOR = os.environ.get(ENV_DEVICE, "cpu").lower()
 TEMPLATE_HEADER = f"""# %%%% [markdown]
 #
 # # %(title)s
@@ -92,7 +92,7 @@
 def default_requirements(path_req: str = PATH_REQ_DEFAULT) -> list:
     with open(path_req) as fp:
         req = fp.readlines()
-    req = [r[:r.index("#")] if "#" in r else r for r in req]
+    req = [r[: r.index("#")] if "#" in r else r for r in req]
     req = [r.strip() for r in req]
     req = [r for r in req if r]
     return req
@@ -101,6 +101,7 @@ def default_requirements(path_req: str = PATH_REQ_DEFAULT) -> list:
 def get_running_cuda_version() -> str:
     try:
         import torch
+
         return torch.version.cuda or ""
     except ImportError:
         return ""
@@ -109,8 +110,9 @@ def get_running_cuda_version() -> str:
 def get_running_torch_version():
     try:
         import torch
+
         ver = torch.__version__
-        return ver[:ver.index('+')] if '+' in ver else ver
+        return ver[: ver.index("+")] if "+" in ver else ver
     except ImportError:
         return ""

@@ -119,8 +121,8 @@ def get_running_torch_version():
 CUDA_VERSION = get_running_cuda_version()
 RUNTIME_VERSIONS = dict(
     TORCH_VERSION_FULL=TORCH_VERSION,
-    TORCH_VERSION=TORCH_VERSION[:TORCH_VERSION.index('+')] if '+' in TORCH_VERSION else TORCH_VERSION,
-    TORCH_MAJOR_DOT_MINOR='.'.join(TORCH_VERSION.split('.')[:2]),
+    TORCH_VERSION=TORCH_VERSION[: TORCH_VERSION.index("+")] if "+" in TORCH_VERSION else TORCH_VERSION,
+    TORCH_MAJOR_DOT_MINOR=".".join(TORCH_VERSION.split(".")[:2]),
     CUDA_VERSION=CUDA_VERSION,
     CUDA_MAJOR_MINOR=CUDA_VERSION.replace(".", ""),
     DEVICE=f"cu{CUDA_VERSION.replace('.', '')}" if CUDA_VERSION else "cpu",
@@ -130,7 +132,7 @@ def get_running_torch_version():
 class HelperCLI:

     DIR_NOTEBOOKS = ".notebooks"
-    META_REQUIRED_FIELDS = ('title', 'author', 'license', 'description')
+    META_REQUIRED_FIELDS = ("title", "author", "license", "description")
     SKIP_DIRS = (
         ".actions",
         ".azure-pipelines",
@@ -144,7 +146,7 @@ class HelperCLI:
     META_FILE_REGEX = ".meta.{yaml,yml}"
     REQUIREMENTS_FILE = "requirements.txt"
     PIP_ARGS_FILE = "pip_arguments.txt"
-    META_PIP_KEY = 'pip__'
+    META_PIP_KEY = "pip__"

     @staticmethod
     def _meta_file(folder: str) -> str:
@@ -171,7 +173,7 @@ def augment_script(fpath: str):
             generated=datetime.now().isoformat(),
         )

-        meta['description'] = meta['description'].replace(os.linesep, f"{os.linesep}# ")
+        meta["description"] = meta["description"].replace(os.linesep, f"{os.linesep}# ")

         header = TEMPLATE_HEADER % meta
         requires = set(default_requirements() + meta["requirements"])
@@ -203,22 +205,22 @@ def _replace_images(lines: list, local_dir: str) -> list:
                 url_path = p_img
                 im = requests.get(p_img, stream=True).raw.read()
             else:
-                url_path = '/'.join([URL_DOWNLOAD, local_dir, p_img])
+                url_path = "/".join([URL_DOWNLOAD, local_dir, p_img])
                 p_local_img = os.path.join(local_dir, p_img)
                 with open(p_local_img, "rb") as fp:
                     im = fp.read()
             im_base64 = base64.b64encode(im).decode("utf-8")
             _, ext = os.path.splitext(p_img)
             md = md.replace(f'src="{p_img}"', f'src="{url_path}"')
-            md = md.replace(f']({p_img})', f'](data:image/{ext[1:]};base64,{im_base64})')
+            md = md.replace(f"]({p_img})", f"](data:image/{ext[1:]};base64,{im_base64})")

         return [ln + os.linesep for ln in md.split(os.linesep)]

     @staticmethod
     def _is_ipynb_parent_dir(dir_path: str) -> bool:
         if HelperCLI._meta_file(dir_path):
             return True
-        sub_dirs = [d for d in glob.glob(os.path.join(dir_path, '*')) if os.path.isdir(d)]
+        sub_dirs = [d for d in glob.glob(os.path.join(dir_path, "*")) if os.path.isdir(d)]
         return any(HelperCLI._is_ipynb_parent_dir(d) for d in sub_dirs)

     @staticmethod
@@ -296,15 +298,14 @@ def parse_requirements(dir_path: str):
         meta = yaml.safe_load(open(fpath))
         pprint(meta)

-        req = meta.get('requirements', [])
+        req = meta.get("requirements", [])
         fname = os.path.join(dir_path, HelperCLI.REQUIREMENTS_FILE)
         print(f"File for requirements: {fname}")
         with open(fname, "w") as fp:
             fp.write(os.linesep.join(req))

         pip_args = {
-            k.replace(HelperCLI.META_PIP_KEY, ''): v
-            for k, v in meta.items() if k.startswith(HelperCLI.META_PIP_KEY)
+            k.replace(HelperCLI.META_PIP_KEY, ""): v for k, v in meta.items() if k.startswith(HelperCLI.META_PIP_KEY)
         }
         cmd_args = []
         for pip_key in pip_args:
@@ -327,33 +328,31 @@ def _get_card_item_cell(path_ipynb: str) -> Dict[str, Any]:

         # Clamp description length
         wrapped_description = wrap(
-            meta.get('short_description', meta['description']).strip().replace(os.linesep, " "), 175
+            meta.get("short_description", meta["description"]).strip().replace(os.linesep, " "), 175
         )
         suffix = "..." if len(wrapped_description) > 1 else ""
-        meta['short_description'] = wrapped_description[0] + suffix
+        meta["short_description"] = wrapped_description[0] + suffix

         # Resolve some default tags based on accelerators and directory name
-        meta['tags'] = meta.get('tags', [])
+        meta["tags"] = meta.get("tags", [])

-        accelerators = meta.get("accelerator", ('CPU', ))
-        if ('GPU' in accelerators) or ('TPU' in accelerators):
-            meta['tags'].append('GPU/TPU')
+        accelerators = meta.get("accelerator", ("CPU",))
+        if ("GPU" in accelerators) or ("TPU" in accelerators):
+            meta["tags"].append("GPU/TPU")

         dirname = os.path.basename(os.path.dirname(path_ipynb))
         if dirname != ".notebooks":
-            meta['tags'].append(dirname)
+            meta["tags"].append(dirname)

-        meta['tags'] = ",".join(meta['tags'])
+        meta["tags"] = ",".join(meta["tags"])

         # Build the notebook cell
         rst_cell = TEMPLATE_CARD_ITEM % meta

         return {
             "cell_type": "raw",
-            "metadata": {
-                "raw_mimetype": "text/restructuredtext"
-            },
-            "source": rst_cell.strip().splitlines(True)
+            "metadata": {"raw_mimetype": "text/restructuredtext"},
+            "source": rst_cell.strip().splitlines(True),
         }

     @staticmethod
@@ -365,27 +364,27 @@ def copy_notebooks(path_root: str, path_docs_ipynb: str = "docs/source/notebooks
             path_docs_ipynb: destination path to the notebooks location
         """
         ls_ipynb = []
-        for sub in (['*.ipynb'], ['**', '*.ipynb']):
+        for sub in (["*.ipynb"], ["**", "*.ipynb"]):
             ls_ipynb += glob.glob(os.path.join(path_root, HelperCLI.DIR_NOTEBOOKS, *sub))

         os.makedirs(path_docs_ipynb, exist_ok=True)
         ipynb_content = []
         for path_ipynb in tqdm.tqdm(ls_ipynb):
             ipynb = path_ipynb.split(os.path.sep)
-            sub_ipynb = os.path.sep.join(ipynb[ipynb.index(HelperCLI.DIR_NOTEBOOKS) + 1:])
+            sub_ipynb = os.path.sep.join(ipynb[ipynb.index(HelperCLI.DIR_NOTEBOOKS) + 1 :])
             new_ipynb = os.path.join(path_docs_ipynb, sub_ipynb)
             os.makedirs(os.path.dirname(new_ipynb), exist_ok=True)
-            print(f'{path_ipynb} -> {new_ipynb}')
+            print(f"{path_ipynb} -> {new_ipynb}")

             with open(path_ipynb) as f:
                 ipynb = json.load(f)

             ipynb["cells"].append(HelperCLI._get_card_item_cell(path_ipynb))

-            with open(new_ipynb, 'w') as f:
+            with open(new_ipynb, "w") as f:
                 json.dump(ipynb, f)

-            ipynb_content.append(os.path.join('notebooks', sub_ipynb))
+            ipynb_content.append(os.path.join("notebooks", sub_ipynb))

     @staticmethod
     def valid_accelerator(dir_path: str):
@@ -397,7 +396,7 @@ def valid_accelerator(dir_path: str):
         assert fpath, f"Missing Meta file in {dir_path}"
         meta = yaml.safe_load(open(fpath))
         # default is CPU runtime
-        accels = [acc.lower() for acc in meta.get("accelerator", ('CPU'))]
+        accels = [acc.lower() for acc in meta.get("accelerator", ("CPU"))]
         dev_accels = DEVICE_ACCELERATOR.split(",")
         return int(any(ac in accels for ac in dev_accels))

@@ -413,7 +412,7 @@ def update_env_details(dir_path: str):
         # default is COU runtime
         with open(PATH_REQ_DEFAULT) as fp:
             req = fp.readlines()
-        req += meta.get('requirements', [])
+        req += meta.get("requirements", [])
         req = [r.strip() for r in req]

         def _parse(pkg: str, keys: str = " <=>") -> str:
@@ -425,12 +424,12 @@ def _parse(pkg: str, keys: str = " <=>") -> str:

         require = {_parse(r) for r in req if r}
         env = {_parse(p): p for p in freeze.freeze()}
-        meta['environment'] = [env[r] for r in require]
-        meta['published'] = datetime.now().isoformat()
+        meta["environment"] = [env[r] for r in require]
+        meta["published"] = datetime.now().isoformat()

         fmeta = os.path.join(HelperCLI.DIR_NOTEBOOKS, dir_path) + ".yaml"
-        yaml.safe_dump(meta, stream=open(fmeta, 'w'), sort_keys=False)
+        yaml.safe_dump(meta, stream=open(fmeta, "w"), sort_keys=False)


-if __name__ == '__main__':
+if __name__ == "__main__":
     fire.Fire(HelperCLI)
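One recurring change above deserves a note: Black rewrites slices such as `ver[:ver.index('+')]` to `ver[: ver.index("+")]`, following the PEP 8 rule that the slice colon gets symmetric spacing when a bound is an expression, while an omitted bound stays bare. A quick check of that behavior (again assuming `black` is installed):

    import black

    # The same slice reformatting seen in get_running_torch_version above.
    src = "ver = ver[:ver.index('+')] if '+' in ver else ver\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # ver = ver[: ver.index("+")] if "+" in ver else ver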

.github/workflows/ci_code-format.yml (-27)
@@ -24,33 +24,6 @@ jobs:
       run: |
         flake8 .

-  imports-check-isort:
-    runs-on: ubuntu-20.04
-    steps:
-    - uses: actions/checkout@master
-    - uses: actions/setup-python@v2
-      with:
-        python-version: 3.8
-    - name: Install isort
-      run: pip install isort && pip list
-    - name: isort
-      run: |
-        isort . --check --diff
-
-  format-check-yapf:
-    runs-on: ubuntu-20.04
-    steps:
-    - uses: actions/checkout@master
-    - uses: actions/setup-python@v2
-      with:
-        python-version: 3.8
-    - name: Install dependencies
-      run: pip install yapf && pip list
-      shell: bash
-    - name: yapf
-      run: |
-        yapf --diff --parallel --recursive .
-
   pre-commit-check:
     runs-on: ubuntu-latest
     steps:

.pre-commit-config.yaml (+8, -7)
@@ -40,10 +40,11 @@ repos:
   hooks:
   - id: isort

-- repo: https://github.com/pre-commit/mirrors-yapf
-  rev: v0.31.0
+- repo: https://github.com/psf/black
+  rev: 21.7b0
   hooks:
-  - id: yapf
+  - id: black
+    name: Format code

 - repo: https://github.com/asottile/yesqa
   rev: v1.2.3
@@ -59,7 +60,7 @@ repos:
   - mdformat-black
   - mdformat_frontmatter

-- repo: https://github.com/PyCQA/flake8
-  rev: 3.9.2
-  hooks:
-  - id: flake8
+# - repo: https://github.com/PyCQA/flake8
+#   rev: 3.9.2
+#   hooks:
+#   - id: flake8
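With the isort and yapf jobs dropped from the workflow and the flake8 hook commented out here (flake8 still runs directly in the workflow above), formatting enforcement converges on the pre-commit-check CI job. The same hooks, including the new black hook, can be run locally; a minimal sketch (assuming `pre-commit` is installed and the repository root is the working directory):

    import subprocess

    # Run every configured hook against all files, as the CI job does.
    subprocess.run(["pre-commit", "run", "--all-files"], check=True)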
