
Commit fd89f9a

Azure Pipelines committed:
Merge remote-tracking branch 'origin/main' into publication
2 parents d4c775a + 8055e90, commit fd89f9a

File tree: 6 files changed, +55 / -32 lines

.actions/assistant.py

Lines changed: 21 additions & 7 deletions
@@ -19,7 +19,7 @@
 _PATH_HERE = os.path.dirname(__file__)
 _PATH_ROOT = os.path.dirname(_PATH_HERE)
 PATH_SCRIPT_RENDER = os.path.join(_PATH_HERE, "_ipynb-render.sh")
-PATH_SCRIPT_TEST = os.path.join(_PATH_HERE, "_ipynb-test.sh")
+PATH_SCRIPT_TEST = os.path.join(_PATH_HERE, "_ipynb-validate.sh")
 # https://askubuntu.com/questions/909918/how-to-show-unzip-progress
 UNZIP_PROGRESS_BAR = ' | awk \'BEGIN {ORS=" "} {if(NR%10==0)print "."}\''
 REPO_NAME = "lightning-tutorials"
@@ -393,7 +393,7 @@ def bash_render(folder: str, output_file: str = PATH_SCRIPT_RENDER) -> Optional[
             fopen.write(os.linesep.join(cmd))

     @staticmethod
-    def bash_test(folder: str, output_file: str = PATH_SCRIPT_TEST, virtualenv: bool = False) -> Optional[str]:
+    def bash_validate(folder: str, output_file: str = PATH_SCRIPT_TEST, virtualenv: bool = False) -> Optional[str]:
         """Prepare bash script for running tests of a particular notebook.

         Args:
@@ -551,7 +551,7 @@ def group_folders(
             fpath_drop_folders: output file with deleted folders
             fpath_actual_dirs: files with listed all folder in particular stat
             strict: raise error if some folder outside skipped does not have valid meta file
-            root_path: path to the root tobe added for all local folder paths in files
+            root_path: path to the root to be added for all local folder paths in files

         Example:
             $ python assistant.py group-folders ../target-diff.txt \
@@ -571,12 +571,26 @@ def group_folders(
         # not empty paths
         dirs = [ln for ln in dirs if ln]

-        if root_path:
-            dirs = [os.path.join(root_path, d) for d in dirs]
-        # unique folders
-        dirs = set(dirs)
         # drop folder that start with . or _ as they are meant to be internal use only
         dirs = [pdir for pdir in dirs if not any(ndir[0] in (".", "_") for ndir in pdir.split(os.path.sep))]
+        # append a path to root in case you call this from other path then root
+        if root_path:
+            dirs = [os.path.join(root_path, d) for d in dirs]
+        # append all subfolders in case of parent requirements has been changed all related notebooks shall be updated
+        dirs_expanded = []
+        for dir in dirs:
+            # in case that the diff item comes from removed folder
+            if not os.path.isdir(dir):
+                dirs_expanded += [dir]
+                continue
+            # list folder and skip all internal files, starting with . or _
+            sub_dirs = [os.path.join(dir, it) for it in os.listdir(dir) if it[0] not in (".", "_")]
+            # filter only folders
+            sub_dirs = [it for it in sub_dirs if os.path.isdir(it)]
+            # if the dir has sub-folder append then otherwise append the dir itself
+            dirs_expanded += sub_dirs if sub_dirs else [dir]
+        # unique folders only, drop duplicates
+        dirs = set(dirs_expanded)
         # valid folder has meta
         dirs_exist = [d for d in dirs if os.path.isdir(d)]
         dirs_invalid = [d for d in dirs_exist if not AssistantCLI._find_meta(d)]
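Note: the new expansion step means a change in a parent folder (for example, a shared requirements.txt) now fans out to every notebook subfolder beneath it, so all related notebooks are re-validated. A minimal standalone sketch of that logic, with hypothetical folder names and not the repository's exact code:

import os

def expand_changed_dirs(dirs: list, root_path: str = "") -> set:
    """Hedged sketch of the added expansion logic (not the repo's implementation)."""
    # prepend the root in case the CLI is called from a different working directory
    if root_path:
        dirs = [os.path.join(root_path, d) for d in dirs]
    expanded = []
    for d in dirs:
        # a diff entry may point at a folder that was removed
        if not os.path.isdir(d):
            expanded.append(d)
            continue
        # keep only real sub-folders, skipping internal ones starting with "." or "_"
        subs = [os.path.join(d, it) for it in os.listdir(d) if it[0] not in (".", "_")]
        subs = [s for s in subs if os.path.isdir(s)]
        expanded += subs or [d]
    return set(expanded)

# e.g. expand_changed_dirs(["course_UvA-DL"]) lists every notebook folder under
# course_UvA-DL, so touching its shared requirements.txt re-validates all of them.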

.actions/test_cli.py

Lines changed: 19 additions & 11 deletions
@@ -14,17 +14,25 @@ def _path_in_dir(fname: str, folder: str = _PATH_ROOT) -> str:


 @pytest.mark.parametrize(
-    "cmd,args",
+    "cmd,kwargs",
     [
-        ("list_dirs", []),
-        ("list_dirs", [".", ".ipynb"]),
-        ("bash_render", [_PATH_DIR_SIMPLE]),
-        ("bash_test", [_PATH_DIR_SIMPLE]),
-        ("group_folders", [_path_in_dir("master-diff.txt"), _path_in_dir("dirs-b1.txt"), _path_in_dir("dirs-b2.txt")]),
-        ("convert_ipynb", [_PATH_DIR_SIMPLE]),
-        ("copy_notebooks", [_PATH_ROOT]),
-        ("update_env_details", [_PATH_DIR_SIMPLE]),
+        ("list_dirs", {}),
+        ("list_dirs", dict(folder=".", include_file_ext=".ipynb")),
+        ("bash_render", dict(folder=_PATH_DIR_SIMPLE)),
+        ("bash_validate", dict(folder=_PATH_DIR_SIMPLE)),
+        (
+            "group_folders",
+            dict(
+                fpath_gitdiff=_path_in_dir("master-diff.txt"),
+                fpath_change_folders=_path_in_dir("dirs-b1.txt"),
+                fpath_drop_folders=_path_in_dir("dirs-b2.txt"),
+                root_path=_PATH_ROOT,
+            ),
+        ),
+        ("convert_ipynb", dict(folder=_PATH_DIR_SIMPLE)),
+        ("copy_notebooks", dict(path_root=_PATH_ROOT)),
+        ("update_env_details", dict(folder=_PATH_DIR_SIMPLE)),
     ],
 )
-def test_assistant_commands(cmd: str, args: list):
-    AssistantCLI().__getattribute__(cmd)(*args)
+def test_assistant_commands(cmd: str, kwargs: dict):
+    AssistantCLI().__getattribute__(cmd)(**kwargs)
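Note: passing keyword arguments makes each parametrized case double as documentation of the CLI signature it exercises, and the same call can be reproduced directly. A minimal usage sketch mirroring the new "group_folders" case, assuming AssistantCLI can be imported from .actions/assistant.py and using illustrative file paths:

from assistant import AssistantCLI  # .actions/assistant.py

# same call the parametrized test makes for the "group_folders" case
AssistantCLI().group_folders(
    fpath_gitdiff="master-diff.txt",        # illustrative fixture paths
    fpath_change_folders="dirs-b1.txt",
    fpath_drop_folders="dirs-b2.txt",
    root_path=".",
)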

.azure/ipynb-validate.yml

Lines changed: 4 additions & 3 deletions
@@ -25,6 +25,7 @@ jobs:
         # printf "Head: $head\n" # this shall be commit hash
         # git diff --name-only $head --output=target-diff.txt
         git diff --name-only origin/main HEAD --output=target-diff.txt
+        printf "Diff to target:\n"
         cat target-diff.txt
         python .actions/assistant.py group-folders --fpath_gitdiff=target-diff.txt
         printf "Changed folders:\n"
@@ -97,9 +98,9 @@ jobs:
   - bash: |
       set -e
       mkdir $(PATH_DATASETS)
-      python .actions/assistant.py bash-test $(notebook)
-      cat .actions/_ipynb-test.sh
-      bash .actions/_ipynb-test.sh
+      python .actions/assistant.py bash-validate $(notebook)
+      cat .actions/_ipynb-validate.sh
+      bash .actions/_ipynb-validate.sh
     env:
       KAGGLE_USERNAME: $(KAGGLE_USERNAME)
      KAGGLE_KEY: $(KAGGLE_KEY)
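Note: the renamed bash-validate step can also be reproduced outside the pipeline; a hedged sketch of the equivalent local calls, assuming the assistant module is importable and using one of the repository's notebook folders purely as an example:

import subprocess
from assistant import AssistantCLI  # .actions/assistant.py

# writes .actions/_ipynb-validate.sh for one (example) notebook folder ...
AssistantCLI.bash_validate("lightning_examples/cifar10-baseline")
# ... then runs it, mirroring the pipeline step above
subprocess.run(["bash", ".actions/_ipynb-validate.sh"], check=True)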

_dockers/ubuntu-cuda/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ ARG CUDA_VERSION=11.8.0
 FROM nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

 ARG PYTHON_VERSION=3.10
-ARG PYTORCH_VERSION=2.0
+ARG PYTORCH_VERSION=2.3

 SHELL ["/bin/bash", "-c"]
 # https://techoverflow.net/2019/05/18/how-to-fix-configuring-tzdata-interactive-input-when-building-docker-images/
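Note: a quick sanity check one might run inside the rebuilt image to confirm the bumped build argument took effect (the exact patch version and CUDA build string depend on the base image):

import torch

print(torch.__version__)   # e.g. "2.3.x+cu118"
print(torch.version.cuda)  # e.g. "11.8"
assert torch.__version__.startswith("2.3")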

course_UvA-DL/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 numpy <2.0 # needed for older Torch
 torch>=1.8.1, <2.1.0
 pytorch-lightning>=2.0, <2.1.0
-torchmetrics>=0.7, <1.3
+torchmetrics>=1.0, <1.3
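Note: raising the torchmetrics floor to 1.0 matters for notebooks that call its functional metrics, since the classification functions there use the task-based API and require an explicit task. A minimal sketch of the call style the new constraint implies:

import torch
from torchmetrics.functional import accuracy

preds = torch.randn(8, 10).softmax(dim=-1)   # toy batch of class probabilities
target = torch.randint(10, (8,))             # toy integer labels
# torchmetrics >= 1.0: the task argument must be named explicitly
acc = accuracy(preds, target, task="multiclass", num_classes=10)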

lightning_examples/cifar10-baseline/baseline.py

Lines changed: 9 additions & 9 deletions
@@ -16,7 +16,7 @@
 from lightning.pytorch.callbacks import LearningRateMonitor
 from lightning.pytorch.loggers import CSVLogger
 from torch.optim.lr_scheduler import OneCycleLR
-from torch.optim.swa_utils import AveragedModel, update_bn
+from torch.optim.swa_utils import AveragedModel
 from torch.utils.data import DataLoader, random_split
 from torchmetrics.functional import accuracy
 from torchvision.datasets import CIFAR10
@@ -173,15 +173,15 @@ def configure_optimizers(self):
 model = LitResnet(lr=0.05)

 trainer = L.Trainer(
-    max_epochs=30,
+    max_epochs=5,
     accelerator="auto",
     devices=1,
     logger=CSVLogger(save_dir="logs/"),
     callbacks=[LearningRateMonitor(logging_interval="step")],
 )

-trainer.fit(model, train_dataloader, val_dataloaders=val_dataloader)
-trainer.test(model, test_dataloader)
+trainer.fit(model, train_dataloaders=train_dataloader, val_dataloaders=val_dataloader)
+trainer.test(model, dataloaders=test_dataloader)

 # %%

@@ -229,22 +229,22 @@ def configure_optimizers(self):
         optimizer = torch.optim.SGD(self.model.parameters(), lr=self.hparams.lr, momentum=0.9, weight_decay=5e-4)
         return optimizer

-    def on_train_end(self):
-        update_bn(self.trainer.datamodule.train_dataloader(), self.swa_model, device=self.device)
+    # def on_train_end(self):  # todo: failing as trainer has only dataloaders, not datamodules
+    #     update_bn(self.trainer.datamodule.train_dataloader(), self.swa_model, device=self.device)


 # %%
 swa_model = SWAResnet(model.model, lr=0.01)

 swa_trainer = L.Trainer(
-    max_epochs=20,
+    max_epochs=5,
     accelerator="auto",
     devices=1,
     logger=CSVLogger(save_dir="logs/"),
 )

-swa_trainer.fit(swa_model, train_dataloader, val_dataloaders=val_dataloader)
-swa_trainer.test(swa_model, test_dataloader)
+swa_trainer.fit(swa_model, train_dataloaders=train_dataloader, val_dataloaders=val_dataloader)
+swa_trainer.test(swa_model, dataloaders=test_dataloader)

 # %%
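Note: the commented-out on_train_end hook fails because the trainer is fed plain dataloaders rather than a datamodule. One possible workaround is to refresh the SWA batch-norm statistics manually after training, sketched below under the assumption (suggested by the removed hook) that SWAResnet keeps the averaged network in its swa_model attribute:

import torch
from torch.optim.swa_utils import update_bn

# hypothetical post-training fix-up: recompute BatchNorm running stats
# of the averaged model using the existing training dataloader
with torch.no_grad():
    update_bn(train_dataloader, swa_model.swa_model, device=swa_model.device)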
