Commit 0ab2736

Jintao-Huang committed Nov 16, 2022
1 parent 141a248
Showing 5 changed files with 20 additions and 9 deletions.
2 changes: 1 addition & 1 deletion README.md

@@ -48,7 +48,7 @@ pip install "transformers>=4.23.*" "datasets>=2.6.*"
 python examples/nlp.py
 
 ### dqn.py
-pip install "gym>=0.26.*" pygame
+pip install "gym>=0.26.2" "pygame>=2.1.2"
 python examples/dqn.py
 
 ### gan.py
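A note on why the specifier likely changed (an inference; the commit itself doesn't say): PEP 440 permits the ".*" wildcard only with the == and != operators, so ">=0.26.*" is not a valid requirement and current pip/packaging reject it, while ">=0.26.2" expresses the intended minimum directly. A runnable check with the packaging library:

    # Sketch: validity of the old vs. new specifier (assumed motivation).
    from packaging.specifiers import InvalidSpecifier, SpecifierSet

    for spec in (">=0.26.2", ">=0.26.*"):
        try:
            SpecifierSet(spec)
            print(f"{spec!r}: valid per PEP 440")
        except InvalidSpecifier:
            print(f"{spec!r}: rejected (.* only combines with == and !=)")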
5 changes: 5 additions & 0 deletions examples/audio.py

@@ -0,0 +1,5 @@
+# Author: Jintao Huang
+# Email: [email protected]
+# Date:
+
+# Ref: https://pytorch.org/audio/stable/index.html
12 changes: 7 additions & 5 deletions examples/contrastive_learning.py

@@ -20,6 +20,7 @@ class Acc(MeanMetric):
     higher_is_better = True
     full_state_update = False
 
+
 #
 device_ids = [0]
 
@@ -58,7 +59,7 @@ def __init__(self, hparams: Dict[str, Any]) -> None:
         #
         resnet.fc = nn.Sequential(
             resnet.fc,
-            nn.ReLU(),
+            nn.ReLU(inplace=True),
             nn.Linear(4*hidden_state, hidden_state)
         )
         optimizer: Optimizer = getattr(optim, hparams["optim_name"])(resnet.parameters(), **hparams["optim_hparams"])
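An aside on the nn.ReLU(inplace=True) switch (a generic sketch, not from this repo): the inplace variant overwrites its input buffer, saving one activation-sized allocation per forward pass. It is safe in this position because Linear's backward needs that layer's input and weight rather than its output, while ReLU's backward can be computed from its own output:

    import torch
    from torch import nn

    x = torch.randn(4, 8, requires_grad=True)
    y = nn.Linear(8, 8)(x)
    z = nn.ReLU(inplace=True)(y)  # reuses y's storage instead of allocating
    z.sum().backward()            # no "modified by an inplace operation" error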
@@ -110,10 +111,10 @@ def test_step(self, batch: Any) -> None:
 
 class MLP(ml.LModule):
     def __init__(self, resnet: Module, hparams: Dict[str, Any]) -> None:
-        hidden_state = hparams["hidden_state"]
+        in_channels = hparams["in_channels"]
         n_classes = hparams["n_classes"]
         #
-        mlp = nn.Linear(4*hidden_state, n_classes)
+        mlp = nn.Linear(in_channels, n_classes)
         optimizer: Optimizer = getattr(optim, hparams["optim_name"])(mlp.parameters(), **hparams["optim_hparams"])
         lr_s: LRScheduler = lrs.CosineAnnealingLR(optimizer, **hparams["lrs_hparams"])
         metrics = {
@@ -173,7 +174,7 @@ def transforms(x: Image.Image) -> List[Tensor]:
             tvt.ToTensor(),
             tvt.Normalize((0.5,), (0.5,)),
         ])
-        return [contrast_transforms(x) for i in range(n_views)]
+        return [contrast_transforms(x) for _ in range(n_views)]
 
     train_dataset = STL10(
         root=DATASETS_PATH,
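The rename of the unused loop variable is cosmetic, but the line it touches is the core of contrastive sampling: each iteration re-runs the stochastic pipeline, so the list holds n_views different augmentations of the same image. A self-contained sketch (transform values assumed, not copied from the file):

    import numpy as np
    import torch
    import torchvision.transforms as tvt
    from PIL import Image

    contrast_transforms = tvt.Compose([
        tvt.RandomResizedCrop(96),  # stochastic: re-sampled on every call
        tvt.ToTensor(),
    ])
    img = Image.fromarray(np.random.randint(0, 256, (96, 96, 3), dtype=np.uint8))
    views = [contrast_transforms(img) for _ in range(2)]
    assert not torch.equal(views[0], views[1])  # almost surely different crops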
@@ -224,6 +225,7 @@ def transforms(x: Image.Image) -> List[Tensor]:
     trainer = ml.Trainer(lmodel, device_ids, runs_dir=RUNS_DIR, **hparams["trainer_hparams"])
     logger.info(trainer.fit(ldm.train_dataloader, ldm.val_dataloader))
     resnet = deepcopy(lmodel.resnet)
+    in_channels = resnet.fc[0].in_features
     resnet.fc = nn.Identity()
     ##########
     del ldm, lmodel, trainer, transforms, train_dataset, val_dataset, max_epochs, batch_size, hparams
@@ -232,7 +234,7 @@ def transforms(x: Image.Image) -> List[Tensor]:
     batch_size = 256
     hparams = {
         "device_ids": device_ids,
-        "hidden_state": hidden_state,
+        "in_channels": in_channels,
         "n_classes": 10,
         "dataloader_hparams": {"batch_size": batch_size, "num_workers": 4},
         "optim_name": "SGD",
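Taken together, the in_channels hunks fix a latent shape bug: once resnet.fc is replaced by nn.Identity(), the backbone emits its native feature width, which is recorded in fc[0].in_features because fc was earlier wrapped as Sequential(original_fc, ReLU, Linear). If the example uses resnet18 with hidden_state=128 (not visible in these hunks), the old 4*hidden_state happened to equal that width (512), but reading it off the live module is correct for any backbone. A sketch with assumed dimensions:

    import torchvision.models as tvm
    from torch import nn

    hidden_state = 128  # assumed value
    resnet = tvm.resnet18(num_classes=4 * hidden_state)
    resnet.fc = nn.Sequential(resnet.fc, nn.ReLU(inplace=True),
                              nn.Linear(4 * hidden_state, hidden_state))
    in_channels = resnet.fc[0].in_features  # 512: the backbone width, kept by fc[0]
    resnet.fc = nn.Identity()               # backbone now outputs 512-dim features
    mlp = nn.Linear(in_channels, 10)        # linear probe sized for STL10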
2 changes: 1 addition & 1 deletion examples/cv_ddp.py

@@ -114,7 +114,7 @@ def parse_opt() -> Namespace:
         "model_name": "resnet50",
         "model_hparams": {"num_classes": 10},
         "model_pretrain_model": {"url": tvm.ResNet50_Weights.DEFAULT.url},
-        "dataloader_hparams": {"batch_size": batch_size, "num_workers": 4, "pin_memory_train": False},
+        "dataloader_hparams": {"batch_size": batch_size, "num_workers": 4},  # "pin_memory": False
         "optim_name": "SGD",
         "optim_hparams": {"lr": 1e-2, "weight_decay": 1e-4, "momentum": 0.9},
         "trainer_hparams": {
8 changes: 6 additions & 2 deletions mini_lightning/_mini_lightning.py

@@ -272,15 +272,19 @@ def __init__(
         #
         batch_size: int,
         num_workers: int = 0,
+        pin_memory: bool = True,
         collate_fn: Optional[Callable[[List[Any]], Any]] = None,  # for test/val and (train if collate_fn_train is None)
         *,
         shuffle_train: bool = True,
-        pin_memory_train: bool = True,
         drop_last_train: bool = True,  # If DP/DDP, drop_last=False may cause uneven split
+        pin_memory_train: Optional[bool] = None,
         collate_fn_train: Optional[Callable[[List[Any]], Any]] = None,
     ) -> None:
+        if pin_memory_train is None:
+            pin_memory_train = pin_memory
         if collate_fn_train is None:
             collate_fn_train = collate_fn
+        #
         self.train_dataloader: Optional[DataLoader] = None
         self.val_dataloader: Optional[DataLoader] = None
         self.test_dataloader: Optional[DataLoader] = None
@@ -294,7 +298,7 @@ def __init__(
         for dataset, loader_name in zip([val_dataset, test_dataset], ["val_dataloader", "test_dataloader"]):
             if rank in {-1, 0} and dataset is not None:
                 loader = DataLoader(dataset, batch_size, shuffle=False,
-                                    num_workers=num_workers, pin_memory=False,
+                                    num_workers=num_workers, pin_memory=pin_memory,
                                     drop_last=False, collate_fn=collate_fn)
                 setattr(self, loader_name, loader)
 
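The net effect in this file (a reading of the two hunks, with a sketch under assumptions): pin_memory becomes a single shared knob; the train loader inherits it unless pin_memory_train explicitly overrides it, and the val/test loaders now honor it instead of the previous hard-coded pin_memory=False. The cv_ddp.py hunk above follows suit, dropping its pin_memory_train key in favor of the shared setting (left behind as a comment for anyone who wants pinning off). The default-resolution pattern in isolation:

    from typing import Optional

    def resolve_pin_memory(pin_memory: bool = True,
                           pin_memory_train: Optional[bool] = None) -> bool:
        # Mirrors the hunk: an explicit train-side value wins; otherwise the
        # train loader falls back to the shared pin_memory setting.
        return pin_memory if pin_memory_train is None else pin_memory_train

    assert resolve_pin_memory() is True                   # old default preserved
    assert resolve_pin_memory(pin_memory=False) is False  # one knob now covers train
    assert resolve_pin_memory(False, pin_memory_train=True) is True  # override wins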
