File tree (8 files changed: +46 / −13 lines changed)
template-text-classification
template-vision-classification
template-vision-segmentation
(8 files changed: +46 / −13 lines changed)
Original file line number Diff line number Diff line change 4747 - name : Install dependencies
4848 run : |
4949 pip install wheel setuptools pip -Uqq
50- pip install --pre pytorch-ignite
5150 pip install -r ./scripts/requirements.txt -f https://download.pytorch.org/whl/cpu/torch_stable.html --progress-bar off
5251 pip uninstall -y tqdm
5352 npm i -g pnpm
9897 restore-keys : |
9998 pnpm-and-pip-cache-
10099
101- - run : pip install -Uq pip wheel && pip install -Uq "black==20.8b1" "isort==5.7.0"
100+ - run : pip install -Uq pip wheel && sh scripts/run_code_style.sh install
102101 - run : npm i -g pnpm
103102 - run : pnpm i --frozen-lockfile --color
104103 - run : pnpm lint
Original file line number Diff line number Diff line change @@ -5,9 +5,10 @@ set -xeu
55if [ $1 == " lint" ]; then
66 black . -l 80 --check
77 isort . --profile black --check
8+ flake8 --select F401 . # find unused imports
89elif [ $1 == " fmt" ]; then
910 isort . --profile black
1011 black . -l 80
1112elif [ $1 == " install" ]; then
12- pip install " black==20.8b1" " isort==5.7.0"
13+ pip install " black==20.8b1" " isort==5.7.0" flake8
1314fi
Original file line number Diff line number Diff line change 44
55#::: if (it.logger) { :::#
66if rank == 0 :
7- exp_logger .close ()
7+ from ignite .contrib .handlers .wandb_logger import WandBLogger
8+
9+ if isinstance (exp_logger , WandBLogger ):
10+ # why handle differently for wandb?
11+ # See: https://github.com/pytorch/ignite/issues/1894
12+ exp_logger .finish ()
13+ elif exp_logger :
14+ exp_logger .close ()
815#::: } :::#
916
1017#::: if (it.save_training || it.save_evaluation) { :::#
Original file line number Diff line number Diff line change 55import ignite .distributed as idist
66import yaml
77from data import setup_data
8+ from ignite .contrib .handlers import LRScheduler , PiecewiseLinear
89from ignite .engine import Events
9- from ignite .handlers .param_scheduler import LRScheduler , PiecewiseLinear
1010from ignite .metrics import Accuracy , Loss
1111from ignite .utils import manual_seed
1212from models import TransformerModel
@@ -157,7 +157,14 @@ def _():
157157 #::: if (it.logger) { :::#
158158 # close logger
159159 if rank == 0 :
160- exp_logger .close ()
160+ from ignite .contrib .handlers .wandb_logger import WandBLogger
161+
162+ if isinstance (exp_logger , WandBLogger ):
163+ # why handle differently for wandb?
164+ # See: https://github.com/pytorch/ignite/issues/1894
165+ exp_logger .finish ()
166+ elif exp_logger :
167+ exp_logger .close ()
161168 #::: } :::#
162169 #
163170 #::: if (it.save_training || it.save_evaluation) { :::#
Original file line number Diff line number Diff line change @@ -113,7 +113,14 @@ def _():
113113 #::: if (it.logger) { :::#
114114 # close logger
115115 if rank == 0 :
116- exp_logger .close ()
116+ from ignite .contrib .handlers .wandb_logger import WandBLogger
117+
118+ if isinstance (exp_logger , WandBLogger ):
119+ # why handle differently for wandb?
120+ # See: https://github.com/pytorch/ignite/issues/1894
121+ exp_logger .finish ()
122+ elif exp_logger :
123+ exp_logger .close ()
117124 #::: } :::#
118125 #
119126 #::: if (it.save_training || it.save_evaluation) { :::#
Original file line number Diff line number Diff line change 1010from ignite .utils import manual_seed
1111from models import Discriminator , Generator
1212from torch import nn , optim
13- from torch .utils .data .distributed import DistributedSampler
1413from trainers import setup_evaluator , setup_trainer
1514from utils import *
1615
@@ -168,7 +167,14 @@ def _():
168167 #::: if (it.logger) { :::#
169168 # close logger
170169 if rank == 0 :
171- exp_logger .close ()
170+ from ignite .contrib .handlers .wandb_logger import WandBLogger
171+
172+ if isinstance (exp_logger , WandBLogger ):
173+ # why handle differently for wandb?
174+ # See: https://github.com/pytorch/ignite/issues/1894
175+ exp_logger .finish ()
176+ elif exp_logger :
177+ exp_logger .close ()
172178 #::: } :::#
173179 #
174180 #::: if (it.save_training || it.save_evaluation) { :::#
Original file line number Diff line number Diff line change 11import os
22from argparse import Namespace
3- from numbers import Number
43from typing import Iterable
54
65import ignite .distributed as idist
Original file line number Diff line number Diff line change 44
55import ignite .distributed as idist
66import yaml
7- from data import denormalize , download_datasets , setup_data
7+ from data import denormalize , setup_data
8+ from ignite .contrib .handlers import LRScheduler
89from ignite .engine import Events
9- from ignite .handlers .param_scheduler import LRScheduler
1010from ignite .metrics import ConfusionMatrix , IoU , mIoU
1111from ignite .utils import manual_seed
1212from models import setup_model
@@ -176,7 +176,14 @@ def _():
176176 #::: if (it.logger) { :::#
177177 # close logger
178178 if rank == 0 :
179- exp_logger .close ()
179+ from ignite .contrib .handlers .wandb_logger import WandBLogger
180+
181+ if isinstance (exp_logger , WandBLogger ):
182+ # why handle differently for wandb?
183+ # See: https://github.com/pytorch/ignite/issues/1894
184+ exp_logger .finish ()
185+ elif exp_logger :
186+ exp_logger .close ()
180187 #::: } :::#
181188 #
182189 #::: if (it.save_training || it.save_evaluation) { :::#
You can’t perform that action at this time.
0 commit comments