diff --git a/ivy_tests/test_ivy/test_frontends/test_torch/test_non_linear_activation_functions.py b/ivy_tests/test_ivy/test_frontends/test_torch/test_non_linear_activation_functions.py
index 20c6f886e6730..e1e9340810c8d 100644
--- a/ivy_tests/test_ivy/test_frontends/test_torch/test_non_linear_activation_functions.py
+++ b/ivy_tests/test_ivy/test_frontends/test_torch/test_non_linear_activation_functions.py
@@ -186,12 +186,9 @@ def test_torch_leaky_relu(
             )
         )
     ),
-    as_variable=st.booleans(),
-    with_out=st.booleans(),
     num_positional_args=helpers.num_positional_args(
         fn_name="ivy.functional.frontends.torch.tanh"
     ),
-    native_array=st.booleans(),
 )
 def test_torch_tanh(
     dtype_and_x,
@@ -204,7 +201,7 @@ def test_torch_tanh(
 
     input_dtype, x = dtype_and_x
     helpers.test_frontend_function(
-        input_dtypes=input_dtype,
+        input_dtypes=[input_dtype],
         as_variable_flags=as_variable,
         with_out=with_out,
         num_positional_args=num_positional_args,
@@ -214,4 +211,4 @@ def test_torch_tanh(
         fn_name="tanh",
         input=np.asarray(x, dtype=input_dtype),
         out=None,
-    )
\ No newline at end of file
+    )