Refactor torch frontend avg pooling functions #21812

Merged Aug 17, 2023 · 11 commits
49 changes: 28 additions & 21 deletions ivy/functional/frontends/torch/nn/functional/pooling_functions.py
@@ -25,10 +25,18 @@ def _broadcast_pooling_helper(x, pool_dims: str = "2d", name: str = "padding"):
)


@to_ivy_arrays_and_back
+@to_ivy_arrays_and_back
def avg_pool1d(
-input, kernel_size, stride=None, padding=0, ceil_mode=False, count_include_pad=True
+input,
+kernel_size,
+stride=None,
+padding=0,
+ceil_mode=False,
+count_include_pad=True,
):
+if stride is None:
+stride = kernel_size
kernel_size = _broadcast_pooling_helper(kernel_size, "1d", name="kernel_size")
stride = _broadcast_pooling_helper(stride, "1d", name="stride")
padding = _broadcast_pooling_helper(padding, "1d", name="padding")
@@ -48,10 +56,15 @@ def avg_pool1d(
padding_str = "VALID"

return ivy.avg_pool1d(
-input, kernel_size, stride, padding_str, data_format=data_format
+input,
+kernel_size,
+stride,
+padding_str,
+data_format=data_format,
+count_include_pad=count_include_pad,
+ceil_mode=ceil_mode,
)
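
For reference, a minimal usage sketch of the refactored frontend function, with made-up data. It assumes a numpy backend is available for ivy.avg_pool1d and that avg_pool1d is re-exported from ivy.functional.frontends.torch.nn.functional; neither assumption is shown in this diff.

# Sketch only: exercises the refactored avg_pool1d frontend with toy data.
import ivy
from ivy.functional.frontends.torch.nn.functional import avg_pool1d

ivy.set_backend("numpy")

# NCW input: batch 1, 2 channels, width 6.
x = ivy.reshape(ivy.arange(12, dtype="float32"), (1, 2, 6))

# kernel_size=2; stride defaults to kernel_size, no padding.
out = avg_pool1d(x, 2)
print(out.shape)  # expected (1, 2, 3) under these assumptions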


@to_ivy_arrays_and_back
def avg_pool2d(
input,
@@ -62,16 +75,9 @@ def avg_pool2d(
count_include_pad=True,
divisor_override=None,
):
-# Figure out input dims N
-input_rank = input.ndim
-
-if input_rank == 3:
-# CHW
-data_format = "CHW"
-elif input_rank == 4:
-# NCHW
-data_format = "NCHW"
-
if stride is None:
stride = kernel_size
+data_format = "NCHW"
kernel_size = _broadcast_pooling_helper(kernel_size, "2d", name="kernel_size")
stride = _broadcast_pooling_helper(stride, "2d", name="stride")
padding = _broadcast_pooling_helper(padding, "2d", name="padding")
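
The _broadcast_pooling_helper used throughout these functions is only partly visible in the first hunk header; the stand-in below is a hypothetical sketch inferred from its call sites (an int or length-1 sequence expanded to one entry per pooled dimension), not the implementation in pooling_functions.py.

# Hypothetical stand-in for _broadcast_pooling_helper, inferred from how it is
# called above; the real helper may differ in naming, validation, and defaults.
def broadcast_pooling_param(value, pool_dims="2d", name="padding"):
    dims = {"1d": 1, "2d": 2, "3d": 3}[pool_dims]
    if isinstance(value, int):
        return [value] * dims
    if len(value) == 1:
        return list(value) * dims
    if len(value) != dims:
        raise ValueError(f"{name} must have {dims} elements, got {len(value)}")
    return list(value)

# An integer kernel_size is broadcast to every spatial dimension.
assert broadcast_pooling_param(3, "2d", name="kernel_size") == [3, 3]
assert broadcast_pooling_param((2, 4), "2d", name="stride") == [2, 4]
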
@@ -271,14 +277,15 @@ def avg_pool3d(
):
if stride is None:
stride = kernel_size
-if not isinstance(padding, int):
-padding = [(padding[i],) * 2 for i in range(3)]
-
-if not all([pad <= kernel // 2 for kernel, pad in zip(kernel_size, padding)]):
-raise ValueError(
-"pad should be smaller than or equal to half of kernel size, "
-f"but got padding={padding}, kernel_size={kernel_size}. "
-)
+kernel_size = _broadcast_pooling_helper(kernel_size, "3d", name="kernel_size")
+stride = _broadcast_pooling_helper(stride, "3d", name="stride")
+padding = _broadcast_pooling_helper(padding, "3d", name="padding")
+if all(
+[pad == ivy.ceil((kernel - 1) / 2) for kernel, pad in zip(kernel_size, padding)]
+):
+padding = "SAME"
+else:
+padding = "VALID"
return ivy.avg_pool3d(
input,
kernel_size,
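
The new avg_pool3d logic selects "SAME" only when every per-dimension pad equals ceil((kernel - 1) / 2), and falls back to "VALID" otherwise. A plain-Python check of that rule with made-up kernel and padding values:

import math

def padding_mode(kernel_size, padding):
    # Mirrors the condition added above: "SAME" only if every pad equals
    # ceil((kernel - 1) / 2) for its dimension, otherwise "VALID".
    if all(pad == math.ceil((k - 1) / 2) for k, pad in zip(kernel_size, padding)):
        return "SAME"
    return "VALID"

print(padding_mode([3, 3, 3], [1, 1, 1]))  # SAME: ceil((3 - 1) / 2) == 1
print(padding_mode([4, 4, 4], [1, 1, 1]))  # VALID: ceil((4 - 1) / 2) == 2
print(padding_mode([3, 3, 3], [0, 0, 0]))  # VALID: no padding
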
2 changes: 1 addition & 1 deletion ivy/stateful/module.py
@@ -5,7 +5,7 @@
import os
import abc
import copy
-import dill
+#import dill
from typing import Optional, Tuple, Dict

# local
@@ -36,7 +36,7 @@ def calculate_same_padding(kernel_size, stride, shape):
if all([kernel_size[i] / 2 >= padding[i] for i in range(len(kernel_size))]):
if is_same_padding(padding, stride, kernel_size, shape):
return padding
-return (0, 0)
+return [0] * len(shape)


# avg_pool1d
@@ -47,11 +47,16 @@ def calculate_same_padding(kernel_size, stride, shape):
max_dims=3,
min_side=1,
max_side=3,
+data_format="channel_first",
),
+count_include_pad=st.just(False),
+ceil_mode=st.booleans(),
test_with_out=st.just(False),
)
def test_torch_avg_pool1d(
dtype_x_k_s,
+count_include_pad,
+ceil_mode,
*,
test_flags,
frontend,
@@ -60,21 +65,14 @@ def test_torch_avg_pool1d(
on_device,
):
input_dtype, x, kernel_size, stride, padding = dtype_x_k_s

-# Torch ground truth func expects input to be consistent
-# with a channels first format i.e. NCW
-x[0] = x[0].reshape((x[0].shape[0], x[0].shape[-1], x[0].shape[1]))
-x_shape = list(x[0].shape[2])
+x_shape = [x[0].shape[2]]

-# Torch ground truth func also takes padding input as an integer
-# or a tuple of integers, not a string
-padding = tuple(
-[
-ivy.handle_padding(x_shape[i], stride[0], kernel_size[i], padding)
-for i in range(len(x_shape))
-]
-)

+if padding == "SAME":
+padding = calculate_same_padding(kernel_size, stride, x_shape)
+else:
+padding = (0,)
helpers.test_frontend_function(
input_dtypes=input_dtype,
backend_to_test=backend_fw,
@@ -86,8 +84,8 @@ def test_torch_avg_pool1d(
kernel_size=kernel_size,
stride=stride,
padding=padding,
-ceil_mode=False,
-count_include_pad=True,
+ceil_mode=ceil_mode,
+count_include_pad=count_include_pad,
)
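
When the pooling strategy hands the test a "SAME" padding string, the test now converts it to the explicit integer padding torch expects via calculate_same_padding. The helper below is a rough, hypothetical version of the usual TF-style SAME formula for one spatial dimension, not the calculate_same_padding defined in this test module:

import math

def same_pad_1d(length, stride, kernel):
    # Hypothetical per-dimension SAME padding: pad so the output length is
    # ceil(length / stride), split symmetrically (torch takes one value per dim).
    out_len = math.ceil(length / stride)
    total = max((out_len - 1) * stride + kernel - length, 0)
    return total // 2

print(same_pad_1d(5, 1, 3))  # 1: pad of 1 keeps a length-5 signal at length 5
print(same_pad_1d(6, 2, 2))  # 0: kernel 2, stride 2 needs no padding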


@@ -99,6 +97,7 @@ def test_torch_avg_pool1d(
max_dims=4,
min_side=1,
max_side=4,
+data_format="channel_first",
),
ceil_mode=st.booleans(),
count_include_pad=st.booleans(),
@@ -125,8 +124,6 @@ def test_torch_avg_pool2d(
else:
padding = (0, 0)

-x[0] = x[0].reshape((x[0].shape[0], x[0].shape[-1], *x[0].shape[1:-1]))

helpers.test_frontend_function(
input_dtypes=input_dtype,
backend_to_test=backend_fw,
@@ -144,19 +141,20 @@ def test_torch_avg_pool2d(
)


# avg_pool3d
@handle_frontend_test(
fn_tree="torch.nn.functional.avg_pool3d",
dtype_x_k_s=helpers.arrays_for_pooling(
min_dims=5,
max_dims=5,
-min_side=1,
+min_side=2,
max_side=4,
-only_explicit_padding=False,
+data_format="channel_first",
+only_explicit_padding=True,
),
count_include_pad=st.booleans(),
ceil_mode=st.booleans(),
-divisor_override=st.one_of(st.none(), st.integers(min_value=1, max_value=4)),
+divisor_override=st.just(None),
test_with_out=st.just(False),
)
def test_torch_avg_pool3d(
@@ -173,7 +171,12 @@ def test_torch_avg_pool3d(
):
input_dtype, x, kernel_size, stride, padding = dtype_x_k_s

-padding = [padding[i][0] for i in range(len(padding))]
+if len(stride) == 1:
+stride = [stride[0]] * 3
+if padding == "SAME":
+padding = calculate_same_padding(kernel_size, stride, x[0].shape[2:])
+elif padding == "VALID":
+padding = (0, 0, 0)

helpers.test_frontend_function(
input_dtypes=input_dtype,