From 6392f9340c2137bd91b475087c00facd8d5db4ec Mon Sep 17 00:00:00 2001 From: Sebastiaan Meijer Date: Tue, 13 Feb 2024 18:51:04 +0100 Subject: [PATCH] Fix an issue where gradient accumulation could not be passed as an argument due to a type error. --- nerfstudio/engine/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nerfstudio/engine/trainer.py b/nerfstudio/engine/trainer.py index 68b026c13f..9b4d6d8a15 100644 --- a/nerfstudio/engine/trainer.py +++ b/nerfstudio/engine/trainer.py @@ -82,7 +82,7 @@ class TrainerConfig(ExperimentConfig): """Path to checkpoint file.""" log_gradients: bool = False """Optionally log gradients during training""" - gradient_accumulation_steps: Dict = field(default_factory=lambda: {}) + gradient_accumulation_steps: Dict[str, int] = field(default_factory=lambda: {}) """Number of steps to accumulate gradients over. Contains a mapping of {param_group:num}"""