From 1bcdd9db8cf28bd222b59e16d0a6b18aae20f3c7 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 2 Oct 2024 12:19:19 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../fabric/plugins/precision/bitsandbytes.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/src/lightning/fabric/plugins/precision/bitsandbytes.py b/src/lightning/fabric/plugins/precision/bitsandbytes.py
index 83dfcf28a7f89..2a6d5b4369269 100644
--- a/src/lightning/fabric/plugins/precision/bitsandbytes.py
+++ b/src/lightning/fabric/plugins/precision/bitsandbytes.py
@@ -184,15 +184,15 @@ def _replace_param(
     if param.device.type == "meta":
         if isinstance(param, bnb.nn.Params4bit):
             return bnb.nn.Params4bit(
-                data = data,
-                requires_grad = data.requires_grad,
-                quant_state = quant_state,
-                blocksize = param.blocksize,
-                compress_statistics = param.compress_statistics,
-                quant_type = param.quant_type,
-                quant_storage = param.quant_storage,
-                module = param.module,
-                bnb_quantized = param.bnb_quantized
+                data=data,
+                requires_grad=data.requires_grad,
+                quant_state=quant_state,
+                blocksize=param.blocksize,
+                compress_statistics=param.compress_statistics,
+                quant_type=param.quant_type,
+                quant_storage=param.quant_storage,
+                module=param.module,
+                bnb_quantized=param.bnb_quantized,
             )
         return torch.nn.Parameter(data, requires_grad=data.requires_grad)
     param.data = data
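
Note: the hunk above is purely cosmetic (the pre-commit formatter drops the spaces around keyword "=" and adds a trailing comma), but for context, the surrounding _replace_param helper re-creates a parameter that still lives on the meta device instead of assigning to its .data. The snippet below is a minimal, hypothetical sketch of that pattern for the plain torch.nn.Parameter branch only; the names linear and materialized are illustrative and not part of the patched file, and the bnb.nn.Params4bit branch additionally forwards the quantization metadata shown in the diff.

# Hypothetical sketch (not from the patch): re-create a meta-device parameter
# rather than overwriting its .data, mirroring the non-quantized branch of
# _replace_param in the file touched above.
import torch

linear = torch.nn.Linear(4, 4, device="meta")  # weights carry no storage yet
materialized = torch.randn(4, 4)               # real weights produced elsewhere

old = linear.weight
if old.device.type == "meta":
    # Build a fresh Parameter, as the patched helper does for meta-device params.
    linear.weight = torch.nn.Parameter(materialized, requires_grad=old.requires_grad)
else:
    old.data = materialized

print(linear.weight.device)  # cpu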