Skip to content

Commit

Permalink
🤖 Lint code
Browse files Browse the repository at this point in the history
  • Loading branch information
ivy-branch committed Sep 30, 2023
1 parent fc79385 commit a04565a
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 6 deletions.
9 changes: 6 additions & 3 deletions ivy/functional/frontends/paddle/nn/functional/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,11 +110,14 @@ def linear(x, weight, bias=None, name=None):
weight = ivy.swapaxes(weight, -1, -2)
return ivy.linear(x, weight, bias=bias)


@to_ivy_arrays_and_back
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def alpha_dropout(x, p=0.5, training=True, name=None):
    """Paddle frontend wrapper for alpha dropout.

    Delegates to ``ivy.alpha_dropout``, which (per the paddle API this
    mirrors) randomly masks inputs while preserving the input's mean and
    variance — TODO confirm semantics against ivy's implementation.

    Parameters
    ----------
    x
        Input array to apply dropout to.
    p
        Probability of an element being dropped. Default ``0.5``.
    training
        Whether dropout is active; when ``False`` the input is presumably
        returned unchanged — verify against ``ivy.alpha_dropout``.
    name
        Unused; accepted only for paddle API signature compatibility.

    Returns
    -------
    The array produced by ``ivy.alpha_dropout``.
    """
    # The diff contained both the pre-lint and post-lint `def` lines;
    # only the PEP8-compliant signature is kept here.
    return ivy.alpha_dropout(x, p=p, training=training)


@to_ivy_arrays_and_back
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
def fold(x, output_sizes, kernel_sizes, strides=1, paddings=0, dilations=1, name=None):
    """Paddle frontend wrapper for ``fold`` (col2im-style recombination).

    Delegates directly to ``ivy.fold``; argument semantics follow the
    paddle ``nn.functional.fold`` API this mirrors — TODO confirm against
    ivy's ``fold`` signature.

    Parameters
    ----------
    x
        Input array of sliding local blocks.
    output_sizes
        Spatial size of the assembled output.
    kernel_sizes
        Size of the sliding blocks.
    strides
        Stride of the sliding blocks. Default ``1``.
    paddings
        Zero-padding added to both sides. Default ``0``.
    dilations
        Element spacing within the blocks. Default ``1``.
    name
        Unused; accepted only for paddle API signature compatibility.

    Returns
    -------
    The array produced by ``ivy.fold``.
    """
    # The diff contained both the pre-lint and post-lint definitions;
    # only the PEP8-compliant one is kept here.
    return ivy.fold(x, output_sizes, kernel_sizes, strides, paddings, dilations)
Original file line number Diff line number Diff line change
Expand Up @@ -417,6 +417,7 @@ def test_linear(
bias=bias,
)


# alpha_dropout
@handle_frontend_test(
fn_tree="paddle.nn.functional.common.alpha_dropout",
Expand Down Expand Up @@ -464,8 +465,9 @@ def test_paddle_alpha_dropout(
axis=axis,
mode=mode,
)

#fold


# fold
@handle_frontend_test(
fn_tree="paddle.nn.functional.common.fold",
d_type_and_x=helpers.dtype_and_values(
Expand Down Expand Up @@ -511,4 +513,4 @@ def test_paddle_fold(
training=training,
axis=axis,
mode=mode,
)
)

0 comments on commit a04565a

Please sign in to comment.