From b62f6a7ce8851a731ed3d72e9d87b1caf52aa047 Mon Sep 17 00:00:00 2001
From: HarshvirSandhu
Date: Tue, 13 Feb 2024 13:02:57 +0530
Subject: [PATCH 1/5] Add logit helper function

---
 pytensor/tensor/special.py   | 11 ++++++++++-
 tests/tensor/test_special.py | 14 ++++++++++++++
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/pytensor/tensor/special.py b/pytensor/tensor/special.py
index f9d37a3a3f..59d955541c 100644
--- a/pytensor/tensor/special.py
+++ b/pytensor/tensor/special.py
@@ -8,7 +8,7 @@
 from pytensor.link.c.op import COp
 from pytensor.tensor.basic import as_tensor_variable
 from pytensor.tensor.elemwise import get_normalized_batch_axes
-from pytensor.tensor.math import gamma, gammaln, neg, sum
+from pytensor.tensor.math import gamma, gammaln, neg, sum, log
 
 
 class SoftmaxGrad(COp):
@@ -780,6 +780,14 @@ def factorial(n):
     return gamma(n + 1)
 
 
+def logit(x):
+    """
+    Logit function.
+
+    """
+    return log(x/(1-x))
+
+
 def beta(a, b):
     """
     Beta function.
@@ -801,6 +809,7 @@ def betaln(a, b):
     "log_softmax",
     "poch",
     "factorial",
+    "logit",
     "beta",
     "betaln",
 ]

diff --git a/tests/tensor/test_special.py b/tests/tensor/test_special.py
index 298df728ca..e54a269925 100644
--- a/tests/tensor/test_special.py
+++ b/tests/tensor/test_special.py
@@ -2,6 +2,7 @@
 import pytest
 from scipy.special import beta as scipy_beta
 from scipy.special import factorial as scipy_factorial
+from scipy.special import logit as scipy_logit
 from scipy.special import log_softmax as scipy_log_softmax
 from scipy.special import poch as scipy_poch
 from scipy.special import softmax as scipy_softmax
@@ -17,6 +18,7 @@
     beta,
     betaln,
     factorial,
+    logit,
     log_softmax,
     poch,
     softmax,
 )
@@ -206,6 +208,18 @@ def test_factorial(n):
     )
 
 
+def test_logit():
+    _x = vector("x")
+    actual_fn = function([_x], logit(_x))
+
+    x = random_ranged(0, 1, (5,))
+    actual = actual_fn(x)
+    expected = scipy_logit(x)
+    np.testing.assert_allclose(
+        actual, expected, rtol=1e-7 if config.floatX == "float64" else 1e-5
+    )
+
+
 def test_beta():
     _a, _b = vectors("a", "b")
     actual_fn = function([_a, _b], beta(_a, _b))

From 34eec8859be94693b31a23f1a530fa80f4d200c8 Mon Sep 17 00:00:00 2001
From: HarshvirSandhu
Date: Tue, 13 Feb 2024 13:10:24 +0530
Subject: [PATCH 2/5] Fix ruff format

---
 pytensor/tensor/special.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytensor/tensor/special.py b/pytensor/tensor/special.py
index 59d955541c..adb5d25639 100644
--- a/pytensor/tensor/special.py
+++ b/pytensor/tensor/special.py
@@ -785,7 +785,7 @@ def logit(x):
     Logit function.
 
     """
-    return log(x/(1-x))
+    return log(x / (1 - x))
 
 
 def beta(a, b):

From e72a61ec1405916615aad212753934fd70fe3ea7 Mon Sep 17 00:00:00 2001
From: HarshvirSandhu
Date: Tue, 13 Feb 2024 14:34:50 +0530
Subject: [PATCH 3/5] Make test_logit more readable

---
 tests/tensor/test_special.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/tests/tensor/test_special.py b/tests/tensor/test_special.py
index e54a269925..73ac814ed3 100644
--- a/tests/tensor/test_special.py
+++ b/tests/tensor/test_special.py
@@ -2,8 +2,8 @@
 import pytest
 from scipy.special import beta as scipy_beta
 from scipy.special import factorial as scipy_factorial
-from scipy.special import logit as scipy_logit
 from scipy.special import log_softmax as scipy_log_softmax
+from scipy.special import logit as scipy_logit
 from scipy.special import poch as scipy_poch
 from scipy.special import softmax as scipy_softmax
 
@@ -18,8 +18,8 @@
     beta,
     betaln,
     factorial,
-    logit,
     log_softmax,
+    logit,
     poch,
     softmax,
 )
@@ -209,12 +209,12 @@ def test_factorial(n):
 
 
 def test_logit():
-    _x = vector("x")
-    actual_fn = function([_x], logit(_x))
+    x = vector("x")
+    actual_fn = function([x], logit(x))
 
-    x = random_ranged(0, 1, (5,))
-    actual = actual_fn(x)
-    expected = scipy_logit(x)
+    x_test = np.linspace(0, 1)
+    actual = actual_fn(x_test)
+    expected = scipy_logit(x_test)
     np.testing.assert_allclose(
         actual, expected, rtol=1e-7 if config.floatX == "float64" else 1e-5
     )

From 3b8b2649e623635006c7c0a2023a8f24fefbef1a Mon Sep 17 00:00:00 2001
From: HarshvirSandhu
Date: Tue, 13 Feb 2024 14:40:43 +0530
Subject: [PATCH 4/5] Solve linting issue

---
 pytensor/tensor/special.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pytensor/tensor/special.py b/pytensor/tensor/special.py
index adb5d25639..a2f02fabd8 100644
--- a/pytensor/tensor/special.py
+++ b/pytensor/tensor/special.py
@@ -8,7 +8,7 @@
 from pytensor.link.c.op import COp
 from pytensor.tensor.basic import as_tensor_variable
 from pytensor.tensor.elemwise import get_normalized_batch_axes
-from pytensor.tensor.math import gamma, gammaln, neg, sum, log
+from pytensor.tensor.math import gamma, gammaln, log, neg, sum
 
 
 class SoftmaxGrad(COp):

From b798ab5763dddf1b6c6b7b1bc271086b0e72cb4a Mon Sep 17 00:00:00 2001
From: HarshvirSandhu
Date: Wed, 14 Feb 2024 17:51:37 +0530
Subject: [PATCH 5/5] Solve test on python 3.12

---
 tests/tensor/test_special.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/tensor/test_special.py b/tests/tensor/test_special.py
index 73ac814ed3..24a172860f 100644
--- a/tests/tensor/test_special.py
+++ b/tests/tensor/test_special.py
@@ -210,7 +210,7 @@ def test_factorial(n):
 
 def test_logit():
     x = vector("x")
-    actual_fn = function([x], logit(x))
+    actual_fn = function([x], logit(x), allow_input_downcast=True)
 
     x_test = np.linspace(0, 1)
     actual = actual_fn(x_test)