train.py
import os

import hydra
from hydra.utils import instantiate
from omegaconf import DictConfig, OmegaConf
from pytorch_lightning import Trainer, seed_everything
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.plugins import DDPPlugin


@hydra.main()
def main(cfg: DictConfig):
    # Fix the global seed so runs are reproducible.
    seed_everything(42)

    # Log hyperparameters and metrics to Weights & Biases.
    logger = WandbLogger(**cfg.logger)
    logger.log_hyperparams(OmegaConf.to_container(cfg, resolve=True))

    # Write checkpoints alongside the W&B run files.
    checkpoint = ModelCheckpoint(**cfg.checkpoint, dirpath=logger.save_dir)

    trainer = Trainer(
        **cfg.trainer,
        logger=logger,
        callbacks=[checkpoint],
        plugins=DDPPlugin(find_unused_parameters=True),
    )

    # Build the LightningModule and LightningDataModule from the config.
    task = instantiate(cfg.task)
    datamodule = instantiate(cfg.data)
    trainer.fit(model=task, datamodule=datamodule)


if __name__ == '__main__':
    # Make CUDA device IDs match the order reported by nvidia-smi.
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    main()
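
For reference, below is a minimal sketch of the Hydra config this script expects. The group names (logger, checkpoint, trainer, task, data) come from the code above; every file name, class path, and value in the sketch is an assumption, not part of the repository. Since @hydra.main() is called without arguments, a config like this would have to be supplied at launch, e.g. via Hydra's --config-dir and --config-name flags.

# config.yaml — hypothetical example; all values are placeholders
logger:
  project: my-project          # passed to WandbLogger(**cfg.logger)
checkpoint:
  monitor: val_loss            # passed to ModelCheckpoint(**cfg.checkpoint)
  save_top_k: 1
trainer:
  max_epochs: 100              # passed to Trainer(**cfg.trainer)
  gpus: 1
task:
  _target_: my_project.task.MyTask          # built via hydra.utils.instantiate
data:
  _target_: my_project.data.MyDataModule    # built via hydra.utils.instantiate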