added sigmoid_focal_loss to paddle frontend and refactored mse_loss in paddle frontend #22585

Merged 11 commits on Sep 1, 2023
ivy/functional/frontends/paddle/nn/functional/loss.py (51 changes: 47 additions & 4 deletions)
@@ -252,16 +252,13 @@ def margin_ranking_loss(input, other, label, margin=0.0, reduction="mean", name=
    return out


@with_supported_dtypes({"2.4.2 and below": ("float32", "float64")}, "paddle")
@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@inputs_to_ivy_arrays
def mse_loss(input, label, reduction="mean", name=None):
    reduction = _get_reduction_func(reduction)
    ret = ivy.square(input - label)
    ret = reduction(ret)

    if ret.shape == ():
        ret = ret.expand_dims()

    return paddle.to_tensor(ret)

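# For reference: _get_reduction_func returns a reduction callable for
# "mean"/"sum"/"none" which is applied to the squared error above, so a scalar
# result is now returned directly as a 0-dim tensor and the old expand_dims
# workaround is dropped. A hypothetical NumPy sketch of the same computation,
# for illustration only:
def _mse_loss_reference_sketch(input, label, reduction="mean"):
    import numpy as np

    ret = np.square(np.asarray(input, dtype=float) - np.asarray(label, dtype=float))
    # e.g. input=[1, 2, 3], label=[1, 2, 4] -> mean([0, 0, 1]) = 1/3
    if reduction == "mean":
        return ret.mean()
    elif reduction == "sum":
        return ret.sum()
    return ret  # reduction == "none"
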

@@ -298,6 +295,52 @@ def nll_loss(
    return output


@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def sigmoid_focal_loss(
    logit,
    label,
    normalizer=None,
    alpha=0.25,
    gamma=2.0,
    reduction="sum",
    name=None,
):
    if reduction not in ["sum", "mean", "none"]:
        raise ValueError(
            "The value of 'reduction' in sigmoid_focal_loss should be 'sum', 'mean' or"
            f" 'none', but received {reduction}, which is not allowed."
        )

    if normalizer is not None and normalizer.ndim > 1:
        raise ValueError(
            "Expected zero or one dimension of normalizer in sigmoid_focal_loss but"
            f" got {normalizer.ndim}."
        )

    if not isinstance(logit, ivy.Array):
        logit = ivy.array(logit)

    if not isinstance(label, ivy.Array):
        label = ivy.array(label)

    pred = ivy.sigmoid(logit)
    loss = -(
        label * alpha * ivy.pow((1 - pred), gamma) * ivy.log(pred)
        + (1 - label) * (1 - alpha) * ivy.pow(pred, gamma) * ivy.log(1 - pred)
    )

    if normalizer is not None:
        loss /= normalizer

    if reduction == "sum":
        return ivy.sum(loss)
    elif reduction == "mean":
        return ivy.mean(loss)

    return loss

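# For reference: with p = sigmoid(x), the per-element loss above is
#     -(y * alpha * (1 - p) ** gamma * log(p) + (1 - y) * (1 - alpha) * p ** gamma * log(1 - p)),
# optionally divided by `normalizer` and then reduced. A hypothetical NumPy
# sketch of the unreduced term, for illustration only:
def _sigmoid_focal_loss_reference_sketch(logit, label, alpha=0.25, gamma=2.0):
    import numpy as np

    p = 1.0 / (1.0 + np.exp(-np.asarray(logit, dtype=float)))
    y = np.asarray(label, dtype=float)
    # e.g. logit=0.0, label=1 -> p=0.5, loss = 0.25 * 0.5 ** 2 * -log(0.5) ≈ 0.0433
    return -(
        y * alpha * (1 - p) ** gamma * np.log(p)
        + (1 - y) * (1 - alpha) * p ** gamma * np.log(1 - p)
    )
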

@with_supported_dtypes({"2.5.1 and below": ("float32", "float64")}, "paddle")
@to_ivy_arrays_and_back
def smooth_l1_loss(
@@ -468,6 +468,87 @@ def test_paddle_nll_loss(
    )


@handle_frontend_test(
    fn_tree="paddle.nn.functional.sigmoid_focal_loss",
    dtype_and_x=helpers.dtype_and_values(
        available_dtypes=helpers.get_dtypes("float"),
        num_arrays=1,
        shared_dtype=False,
        min_num_dims=3,
        min_dim_size=3,
        max_num_dims=3,
        max_dim_size=3,
    ),
    dtype_and_normalizer=helpers.dtype_and_values(
        available_dtypes=helpers.get_dtypes("float"),
        num_arrays=1,
        shared_dtype=True,
        min_num_dims=1,
        min_dim_size=1,
        max_num_dims=1,
        max_dim_size=1,
    ),
    labels=st.lists(
        (
            st.lists(
                (
                    st.lists(
                        st.integers(min_value=0, max_value=1), min_size=3, max_size=3
                    )
                ),
                min_size=3,
                max_size=3,
            )
        ),
        min_size=1,
        max_size=1,
    ),
    alpha=st.floats(
        min_value=0.0,
        max_value=1.0,
    ),
    gamma=st.floats(
        min_value=0.0,
        max_value=5.0,
    ),
    reduction=st.sampled_from(["mean", "sum", "none"]),
)
def test_paddle_sigmoid_focal_loss(
    dtype_and_x,
    dtype_and_normalizer,
    labels,
    alpha,
    gamma,
    reduction,
    on_device,
    fn_tree,
    frontend,
    test_flags,
    backend_fw,
):
    x_dtype, x = dtype_and_x
    normalizer_dtype, normalizer = dtype_and_normalizer
    x[0] = x[0].reshape([3, 3, 3])
    normalizer = [norm.reshape(-1) for norm in normalizer]

    labels = ivy.array(labels, dtype=ivy.int64)
    labels = labels.reshape([3, 3, 1])
    helpers.test_frontend_function(
        input_dtypes=[ivy.int64] + [ivy.float64] + x_dtype + normalizer_dtype,
        backend_to_test=backend_fw,
        frontend=frontend,
        test_flags=test_flags,
        fn_tree=fn_tree,
        on_device=on_device,
        logit=x[0],
        label=labels,
        alpha=alpha,
        gamma=gamma,
        normalizer=normalizer[0],
        reduction=reduction,
    )

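# For reference: the strategies above generate logits of shape (3, 3, 3), a
# single-element normalizer, and binary labels that are reshaped to (3, 3, 1)
# and broadcast against the logits inside the frontend loss, while
# helpers.test_frontend_function compares the result against native
# paddle.nn.functional.sigmoid_focal_loss. A hypothetical standalone check of
# the broadcasting assumption, for illustration only:
def _sigmoid_focal_loss_shapes_sketch():
    import numpy as np

    logit = np.zeros((3, 3, 3))
    label = np.ones((3, 3, 1), dtype=np.int64)
    assert (label * logit).shape == (3, 3, 3)  # labels broadcast over the last axis
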

# smooth_l1_loss
@handle_frontend_test(
    fn_tree="paddle.nn.functional.smooth_l1_loss",