Getting the stateful tests up and running #28438

Open · wants to merge 4 commits into main

9 changes: 4 additions & 5 deletions ivy_tests/test_ivy/helpers/testing_helpers.py

@@ -664,7 +664,9 @@ def handle_method(
     )
 
     if init_num_positional_args is None:
-        init_num_positional_args = num_positional_args(fn_name=init_tree)
+        init_num_positional_args = num_positional_args(
+            fn_name=method_tree.rpartition(".")[0]
+        )
 
     possible_arguments["init_flags"] = pf.init_method_flags(
         num_positional_args=init_num_positional_args,
@@ -673,10 +675,7 @@ def handle_method(
         precision_mode=_get_runtime_flag_value(precision_mode),
     )
 
-    if method_num_positional_args is None:
-        method_num_positional_args = num_positional_args_method(
-            method=callable_method
-        )
+    method_num_positional_args = num_positional_args_method(method=callable_method)
 
     possible_arguments["method_flags"] = pf.method_flags(
         num_positional_args=method_num_positional_args,
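In words: the helper no longer needs a separate `init_tree` lookup; it derives the constructor's tree by stripping the method name off `method_tree`. A minimal sketch of that derivation (the `method_tree` values are illustrative, not taken from the diff):

```python
# Minimal sketch of the new derivation; method_tree values are illustrative.
method_tree = "ELU._forward"

# str.rpartition(".") splits at the *last* dot, so index [0] keeps everything
# before the method name, i.e. the tree of the class whose __init__ gets
# inspected for positional arguments.
init_tree = method_tree.rpartition(".")[0]
assert init_tree == "ELU"

# The same holds for dotted module paths:
assert "stateful.ELU._forward".rpartition(".")[0] == "stateful.ELU"
```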
56 changes: 26 additions & 30 deletions ivy_tests/test_ivy/test_stateful/test_activations.py

@@ -20,7 +20,7 @@
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="ELU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="ELU._forward"),
     test_gradients=st.just(True),
     alpha=helpers.floats(min_value=0.1, max_value=1),
 )
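This file makes the companion change throughout, swapping the `fn_name=` keyword for `method=` wherever `method_num_positional_args` is drawn. Conceptually, such a strategy must count how many of the bound method's parameters can be supplied positionally; a hedged, self-contained sketch of that counting (`count_positional_args` and the `ELU` stub are illustrative stand-ins, not Ivy internals):

```python
import inspect

def count_positional_args(fn) -> int:
    """Count parameters of fn that may be supplied positionally."""
    kinds = (
        inspect.Parameter.POSITIONAL_ONLY,
        inspect.Parameter.POSITIONAL_OR_KEYWORD,
    )
    return sum(p.kind in kinds for p in inspect.signature(fn).parameters.values())

class ELU:
    def _forward(self, x, /, *, alpha=1.0):
        ...

# signature() on a bound method already drops `self`, so only `x` counts here.
assert count_positional_args(ELU()._forward) == 1
```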
@@ -45,8 +45,8 @@ def test_elu(
         method_flags=method_flags,
         init_input_dtypes=input_dtype,
         method_input_dtypes=input_dtype,
-        init_all_as_kwargs_np={},
-        method_all_as_kwargs_np={"x": x[0], "alpha": alpha},
+        init_all_as_kwargs_np={"alpha": alpha},
+        method_all_as_kwargs_np={"x": x[0]},
         class_name=class_name,
         method_name=method_name,
         rtol_=1e-2,
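The other recurring change moves constructor arguments (`alpha` here; `eps` for Logit, `axis` for Softmax, and `beta`/`threshold` for Softplus further down) out of `method_all_as_kwargs_np` and into `init_all_as_kwargs_np`, matching where the stateful module actually consumes them. A sketch of the calling convention this encodes; the `ELU` signature below is assumed for illustration, not copied from Ivy:

```python
import math

class ELU:
    def __init__(self, alpha=1.0):   # receives init_all_as_kwargs_np
        self.alpha = alpha

    def _forward(self, x):           # receives method_all_as_kwargs_np
        return [v if v > 0 else self.alpha * (math.exp(v) - 1.0) for v in x]

layer = ELU(alpha=0.5)               # was: ELU() with alpha passed per call
print(layer._forward([1.0, -1.0]))   # [1.0, -0.316...]
```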
@@ -66,7 +66,7 @@ def test_elu(
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="GEGLU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="GEGLU._forward"),
     test_gradients=st.just(True),
 )
 def test_geglu(
@@ -112,7 +112,7 @@ def test_geglu(
         safety_factor_scale="linear",
     ),
     approximate=st.booleans(),
-    method_num_positional_args=helpers.num_positional_args(fn_name="GELU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="GELU._forward"),
     test_gradients=st.just(True),
 )
 def test_gelu(
@@ -157,9 +157,7 @@ def test_gelu(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(
-        fn_name="Hardswish._forward"
-    ),
+    method_num_positional_args=helpers.num_positional_args(method="Hardswish._forward"),
     test_gradients=st.just(True),
 )
 def test_hardswish(
@@ -204,9 +202,7 @@ def test_hardswish(
         safety_factor_scale="log",
     ),
     alpha=st.floats(min_value=-1e-4, max_value=1e-4),
-    method_num_positional_args=helpers.num_positional_args(
-        fn_name="LeakyReLU._forward"
-    ),
+    method_num_positional_args=helpers.num_positional_args(method="LeakyReLU._forward"),
     test_gradients=st.just(True),
 )
 def test_leaky_relu(
@@ -253,7 +249,7 @@ def test_leaky_relu(
     ),
     axis=helpers.ints(min_value=-1, max_value=0),
     method_num_positional_args=helpers.num_positional_args(
-        fn_name="LogSoftmax._forward"
+        method="LogSoftmax._forward"
     ),
     test_gradients=st.just(True),
 )
@@ -299,7 +295,7 @@ def test_log_softmax(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Logit._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Logit._forward"),
     eps=helpers.floats(min_value=1e-4, max_value=1e-2),
     test_gradients=st.just(True),
 )
@@ -324,8 +320,8 @@ def test_logit(
         method_flags=method_flags,
         init_input_dtypes=input_dtype,
         method_input_dtypes=input_dtype,
-        init_all_as_kwargs_np={},
-        method_all_as_kwargs_np={"x": x[0], "eps": eps},
+        init_all_as_kwargs_np={"eps": eps},
+        method_all_as_kwargs_np={"x": x[0]},
         class_name=class_name,
         method_name=method_name,
         rtol_=1e-2,
@@ -346,7 +342,7 @@ def test_logit(
         min_num_dims=2,
     ),
     method_num_positional_args=helpers.num_positional_args(
-        fn_name="LogSigmoid._forward"
+        method="LogSigmoid._forward"
     ),
     test_gradients=st.just(True),
 )
@@ -389,7 +385,7 @@ def test_logsigmoid(
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Mish._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Mish._forward"),
     test_gradients=st.just(True),
 )
 def test_mish(
@@ -435,7 +431,7 @@ def test_mish(
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="PReLU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="PReLU._forward"),
     test_gradients=st.just(True),
 )
 def test_prelu(
@@ -477,7 +473,7 @@ def test_prelu(
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="ReLU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="ReLU._forward"),
     test_gradients=st.just(True),
 )
 def test_relu(
@@ -521,7 +517,7 @@ def test_relu(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="ReLU6._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="ReLU6._forward"),
     test_gradients=st.just(True),
 )
 def test_relu6(
@@ -565,7 +561,7 @@ def test_relu6(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="SeLU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="SeLU._forward"),
     test_gradients=st.just(True),
 )
 def test_selu(
@@ -609,7 +605,7 @@ def test_selu(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Sigmoid._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Sigmoid._forward"),
     test_gradients=st.just(True),
 )
 def test_sigmoid(
@@ -651,7 +647,7 @@ def test_sigmoid(
         small_abs_safety_factor=8,
         safety_factor_scale="log",
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="SiLU._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="SiLU._forward"),
     test_gradients=st.just(True),
 )
 def test_silu(
@@ -695,7 +691,7 @@ def test_silu(
         safety_factor_scale="log",
     ),
     axis=helpers.ints(min_value=-1, max_value=0),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Softmax._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Softmax._forward"),
     test_gradients=st.just(True),
 )
 def test_softmax(
@@ -719,8 +715,8 @@ def test_softmax(
         method_flags=method_flags,
         init_input_dtypes=input_dtype,
         method_input_dtypes=input_dtype,
-        init_all_as_kwargs_np={},
-        method_all_as_kwargs_np={"x": x[0], "axis": axis},
+        init_all_as_kwargs_np={"axis": axis},
+        method_all_as_kwargs_np={"x": x[0]},
         class_name=class_name,
         method_name=method_name,
         rtol_=1e-2,
@@ -741,7 +737,7 @@ def test_softmax(
     ),
     beta=st.one_of(helpers.number(min_value=0.1, max_value=10), st.none()),
     threshold=st.one_of(helpers.number(min_value=0.1, max_value=30), st.none()),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Softplus._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Softplus._forward"),
     test_gradients=st.just(True),
 )
 def test_softplus(
@@ -766,8 +762,8 @@ def test_softplus(
         method_flags=method_flags,
         init_input_dtypes=input_dtype,
         method_input_dtypes=input_dtype,
-        init_all_as_kwargs_np={},
-        method_all_as_kwargs_np={"x": x[0], "beta": beta, "threshold": threshold},
+        init_all_as_kwargs_np={"beta": beta, "threshold": threshold},
+        method_all_as_kwargs_np={"x": x[0]},
         class_name=class_name,
         method_name=method_name,
         rtol_=1e-2,
@@ -787,7 +783,7 @@ def test_softplus(
         safety_factor_scale="log",
         min_num_dims=2,
     ),
-    method_num_positional_args=helpers.num_positional_args(fn_name="Tanh._forward"),
+    method_num_positional_args=helpers.num_positional_args(method="Tanh._forward"),
     test_gradients=st.just(True),
 )
 def test_tanh(