From 71c1dba925f6a869d680ef73defc38b7ea6550de Mon Sep 17 00:00:00 2001
From: Victor Vargas
Date: Mon, 15 Apr 2024 19:22:00 +0200
Subject: [PATCH 1/5] renamed distributions module to soft_labelling

---
 dlordinal/losses/beta_loss.py | 2 +-
 dlordinal/losses/binomial_loss.py | 2 +-
 dlordinal/losses/exponential_loss.py | 2 +-
 dlordinal/losses/general_triangular_loss.py | 2 +-
 dlordinal/losses/poisson_loss.py | 2 +-
 dlordinal/losses/triangular_loss.py | 2 +-
 dlordinal/{distributions => soft_labelling}/__init__.py | 0
 .../{distributions => soft_labelling}/beta_distribution.py | 0
 .../{distributions => soft_labelling}/binomial_distribution.py | 0
 .../exponential_distribution.py | 0
 .../general_triangular_distribution.py | 0
 .../{distributions => soft_labelling}/poisson_distribution.py | 0
 dlordinal/{distributions => soft_labelling}/tests/__init__.py | 0
 .../tests/test_beta_distribution.py | 0
 .../tests/test_binomial_distribution.py | 0
 .../tests/test_exponential_distribution.py | 0
 .../tests/test_general_triangular_distribution.py | 0
 .../tests/test_poisson_distribution.py | 0
 .../tests/test_triangular_distribution.py | 0
 dlordinal/{distributions => soft_labelling}/tests/test_utils.py | 0
 .../triangular_distribution.py | 0
 dlordinal/{distributions => soft_labelling}/utils.py | 0
 22 files changed, 6 insertions(+), 6 deletions(-)
 rename dlordinal/{distributions => soft_labelling}/__init__.py (100%)
 rename dlordinal/{distributions => soft_labelling}/beta_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/binomial_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/exponential_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/general_triangular_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/poisson_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/__init__.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_beta_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_binomial_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_exponential_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_general_triangular_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_poisson_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_triangular_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/tests/test_utils.py (100%)
 rename dlordinal/{distributions => soft_labelling}/triangular_distribution.py (100%)
 rename dlordinal/{distributions => soft_labelling}/utils.py (100%)

diff --git a/dlordinal/losses/beta_loss.py b/dlordinal/losses/beta_loss.py
index 51ec9c1..8fd28e4 100644
--- a/dlordinal/losses/beta_loss.py
+++ b/dlordinal/losses/beta_loss.py
@@ -3,7 +3,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_beta_softlabels
+from ..soft_labelling import get_beta_softlabels
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 # Params [a,b] for beta distribution
diff --git a/dlordinal/losses/binomial_loss.py b/dlordinal/losses/binomial_loss.py
index d146f20..b85ecd3 100644
--- a/dlordinal/losses/binomial_loss.py
+++ b/dlordinal/losses/binomial_loss.py
@@ -3,7 +3,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_binomial_softlabels
+from ..soft_labelling import get_binomial_softlabels
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 
diff --git a/dlordinal/losses/exponential_loss.py b/dlordinal/losses/exponential_loss.py
index a059b31..d38ecd8 100644
--- a/dlordinal/losses/exponential_loss.py
+++ b/dlordinal/losses/exponential_loss.py
@@ -3,7 +3,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_exponential_softlabels
+from ..soft_labelling import get_exponential_softlabels
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 
diff --git a/dlordinal/losses/general_triangular_loss.py b/dlordinal/losses/general_triangular_loss.py
index 0ad1c06..a2ba122 100644
--- a/dlordinal/losses/general_triangular_loss.py
+++ b/dlordinal/losses/general_triangular_loss.py
@@ -4,7 +4,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_general_triangular_softlabels
+from ..soft_labelling import get_general_triangular_softlabels
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 
diff --git a/dlordinal/losses/poisson_loss.py b/dlordinal/losses/poisson_loss.py
index d19ea94..260c36b 100644
--- a/dlordinal/losses/poisson_loss.py
+++ b/dlordinal/losses/poisson_loss.py
@@ -3,7 +3,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_poisson_probabilities
+from ..soft_labelling import get_poisson_probabilities
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 
diff --git a/dlordinal/losses/triangular_loss.py b/dlordinal/losses/triangular_loss.py
index 890d1c5..8bb4a8d 100644
--- a/dlordinal/losses/triangular_loss.py
+++ b/dlordinal/losses/triangular_loss.py
@@ -3,7 +3,7 @@
 import torch
 from torch import Tensor
 
-from ..distributions import get_triangular_softlabels
+from ..soft_labelling import get_triangular_softlabels
 from .custom_targets_loss import CustomTargetsCrossEntropyLoss
 
 
diff --git a/dlordinal/distributions/__init__.py b/dlordinal/soft_labelling/__init__.py
similarity index 100%
rename from dlordinal/distributions/__init__.py
rename to dlordinal/soft_labelling/__init__.py
diff --git a/dlordinal/distributions/beta_distribution.py b/dlordinal/soft_labelling/beta_distribution.py
similarity index 100%
rename from dlordinal/distributions/beta_distribution.py
rename to dlordinal/soft_labelling/beta_distribution.py
diff --git a/dlordinal/distributions/binomial_distribution.py b/dlordinal/soft_labelling/binomial_distribution.py
similarity index 100%
rename from dlordinal/distributions/binomial_distribution.py
rename to dlordinal/soft_labelling/binomial_distribution.py
diff --git a/dlordinal/distributions/exponential_distribution.py b/dlordinal/soft_labelling/exponential_distribution.py
similarity index 100%
rename from dlordinal/distributions/exponential_distribution.py
rename to dlordinal/soft_labelling/exponential_distribution.py
diff --git a/dlordinal/distributions/general_triangular_distribution.py b/dlordinal/soft_labelling/general_triangular_distribution.py
similarity index 100%
rename from dlordinal/distributions/general_triangular_distribution.py
rename to dlordinal/soft_labelling/general_triangular_distribution.py
diff --git a/dlordinal/distributions/poisson_distribution.py b/dlordinal/soft_labelling/poisson_distribution.py
similarity index 100%
rename from dlordinal/distributions/poisson_distribution.py
rename to dlordinal/soft_labelling/poisson_distribution.py
diff --git a/dlordinal/distributions/tests/__init__.py b/dlordinal/soft_labelling/tests/__init__.py
similarity index 100%
rename from dlordinal/distributions/tests/__init__.py
rename to dlordinal/soft_labelling/tests/__init__.py
diff --git a/dlordinal/distributions/tests/test_beta_distribution.py b/dlordinal/soft_labelling/tests/test_beta_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_beta_distribution.py
rename to dlordinal/soft_labelling/tests/test_beta_distribution.py
diff --git a/dlordinal/distributions/tests/test_binomial_distribution.py b/dlordinal/soft_labelling/tests/test_binomial_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_binomial_distribution.py
rename to dlordinal/soft_labelling/tests/test_binomial_distribution.py
diff --git a/dlordinal/distributions/tests/test_exponential_distribution.py b/dlordinal/soft_labelling/tests/test_exponential_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_exponential_distribution.py
rename to dlordinal/soft_labelling/tests/test_exponential_distribution.py
diff --git a/dlordinal/distributions/tests/test_general_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_general_triangular_distribution.py
rename to dlordinal/soft_labelling/tests/test_general_triangular_distribution.py
diff --git a/dlordinal/distributions/tests/test_poisson_distribution.py b/dlordinal/soft_labelling/tests/test_poisson_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_poisson_distribution.py
rename to dlordinal/soft_labelling/tests/test_poisson_distribution.py
diff --git a/dlordinal/distributions/tests/test_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_triangular_distribution.py
similarity index 100%
rename from dlordinal/distributions/tests/test_triangular_distribution.py
rename to dlordinal/soft_labelling/tests/test_triangular_distribution.py
diff --git a/dlordinal/distributions/tests/test_utils.py b/dlordinal/soft_labelling/tests/test_utils.py
similarity index 100%
rename from dlordinal/distributions/tests/test_utils.py
rename to dlordinal/soft_labelling/tests/test_utils.py
diff --git a/dlordinal/distributions/triangular_distribution.py b/dlordinal/soft_labelling/triangular_distribution.py
similarity index 100%
rename from dlordinal/distributions/triangular_distribution.py
rename to dlordinal/soft_labelling/triangular_distribution.py
diff --git a/dlordinal/distributions/utils.py b/dlordinal/soft_labelling/utils.py
similarity index 100%
rename from dlordinal/distributions/utils.py
rename to dlordinal/soft_labelling/utils.py

From 0f434fc820a897171779cad79dfc05ffeddfaa77 Mon Sep 17 00:00:00 2001
From: victormvy
Date: Mon, 15 Apr 2024 19:42:35 +0200
Subject: [PATCH 2/5] changed docs accordingly to new soft_labelling module

---
 docs/api.rst | 2 +-
 docs/distributions.rst | 9 ---------
 docs/soft_labelling.rst | 9 +++++++++
 3 files changed, 10 insertions(+), 10 deletions(-)
 delete mode 100644 docs/distributions.rst
 create mode 100644 docs/soft_labelling.rst

diff --git a/docs/api.rst b/docs/api.rst
index 0748974..9cb0dd7 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -12,8 +12,8 @@ This is the API for the **dlordinal** package.
 
    losses
    datasets
-   distributions
    layers
    models
    metrics
    sklearn_integration
+   soft_labelling
diff --git a/docs/distributions.rst b/docs/distributions.rst
deleted file mode 100644
index fd0a674..0000000
--- a/docs/distributions.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-.. _distributions:
-
-Probability distributions
-=========================
-
-.. automodule:: dlordinal.distributions
-   :members:
-
-..
footbibliography:: diff --git a/docs/soft_labelling.rst b/docs/soft_labelling.rst new file mode 100644 index 0000000..6c9cc87 --- /dev/null +++ b/docs/soft_labelling.rst @@ -0,0 +1,9 @@ +.. _soft_labelling: + +Soft labelling +========================= + +.. automodule:: dlordinal.soft_labelling + :members: + +.. footbibliography:: From 41bc26e04d31320f36ff4da645769fd6665771e8 Mon Sep 17 00:00:00 2001 From: victormvy Date: Mon, 15 Apr 2024 19:49:18 +0200 Subject: [PATCH 3/5] updated import in soft_labelling tests --- dlordinal/soft_labelling/tests/test_beta_distribution.py | 4 ++-- dlordinal/soft_labelling/tests/test_binomial_distribution.py | 2 +- .../soft_labelling/tests/test_exponential_distribution.py | 2 +- .../tests/test_general_triangular_distribution.py | 2 +- dlordinal/soft_labelling/tests/test_poisson_distribution.py | 2 +- .../soft_labelling/tests/test_triangular_distribution.py | 2 +- dlordinal/soft_labelling/tests/test_utils.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/dlordinal/soft_labelling/tests/test_beta_distribution.py b/dlordinal/soft_labelling/tests/test_beta_distribution.py index 0224164..d754c99 100644 --- a/dlordinal/soft_labelling/tests/test_beta_distribution.py +++ b/dlordinal/soft_labelling/tests/test_beta_distribution.py @@ -1,8 +1,8 @@ import numpy as np import pytest -from dlordinal.distributions import get_beta_softlabels -from dlordinal.distributions.beta_distribution import beta_dist, beta_func +from dlordinal.soft_labelling import get_beta_softlabels +from dlordinal.soft_labelling.beta_distribution import beta_dist, beta_func def test_beta_inc(): diff --git a/dlordinal/soft_labelling/tests/test_binomial_distribution.py b/dlordinal/soft_labelling/tests/test_binomial_distribution.py index 7802221..a849b66 100644 --- a/dlordinal/soft_labelling/tests/test_binomial_distribution.py +++ b/dlordinal/soft_labelling/tests/test_binomial_distribution.py @@ -1,7 +1,7 @@ import numpy as np import pytest -from dlordinal.distributions import get_binomial_softlabels +from dlordinal.soft_labelling import get_binomial_softlabels def test_get_binomial_probabilities(): diff --git a/dlordinal/soft_labelling/tests/test_exponential_distribution.py b/dlordinal/soft_labelling/tests/test_exponential_distribution.py index 76f2c8d..c3f900a 100644 --- a/dlordinal/soft_labelling/tests/test_exponential_distribution.py +++ b/dlordinal/soft_labelling/tests/test_exponential_distribution.py @@ -1,7 +1,7 @@ import numpy as np import pytest -from dlordinal.distributions import get_exponential_softlabels +from dlordinal.soft_labelling import get_exponential_softlabels def test_get_exponential_probabilities(): diff --git a/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py index 390e1ff..bf0ca61 100644 --- a/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py +++ b/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py @@ -3,7 +3,7 @@ import numpy as np import pytest -from dlordinal.distributions import get_general_triangular_params +from dlordinal.soft_labelling import get_general_triangular_params def test_get_general_triangular_params(): diff --git a/dlordinal/soft_labelling/tests/test_poisson_distribution.py b/dlordinal/soft_labelling/tests/test_poisson_distribution.py index 2d4df26..c839e97 100644 --- a/dlordinal/soft_labelling/tests/test_poisson_distribution.py +++ b/dlordinal/soft_labelling/tests/test_poisson_distribution.py @@ -1,7 +1,7 
@@ import numpy as np import pytest -from dlordinal.distributions import get_poisson_probabilities +from dlordinal.soft_labelling import get_poisson_probabilities def test_get_poisson_probabilities(): diff --git a/dlordinal/soft_labelling/tests/test_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_triangular_distribution.py index 511a4f9..e3a52a5 100644 --- a/dlordinal/soft_labelling/tests/test_triangular_distribution.py +++ b/dlordinal/soft_labelling/tests/test_triangular_distribution.py @@ -1,7 +1,7 @@ import numpy as np import pytest -from dlordinal.distributions import get_triangular_softlabels +from dlordinal.soft_labelling import get_triangular_softlabels def test_get_triangular_probabilities(): diff --git a/dlordinal/soft_labelling/tests/test_utils.py b/dlordinal/soft_labelling/tests/test_utils.py index 2bd1c8c..f5738aa 100644 --- a/dlordinal/soft_labelling/tests/test_utils.py +++ b/dlordinal/soft_labelling/tests/test_utils.py @@ -1,4 +1,4 @@ -from dlordinal.distributions.utils import get_intervals, triangular_cdf +from dlordinal.soft_labelling.utils import get_intervals, triangular_cdf def test_get_intervals(): From 51e1facb3ceecec7c8ebec8febcb2e28cf78263a Mon Sep 17 00:00:00 2001 From: victormvy Date: Mon, 15 Apr 2024 20:14:43 +0200 Subject: [PATCH 4/5] renamed poisson and updated docstrings --- dlordinal/losses/poisson_loss.py | 4 ++-- dlordinal/soft_labelling/__init__.py | 4 ++-- dlordinal/soft_labelling/beta_distribution.py | 4 ++-- dlordinal/soft_labelling/binomial_distribution.py | 6 +++--- dlordinal/soft_labelling/exponential_distribution.py | 6 +++--- .../soft_labelling/general_triangular_distribution.py | 8 ++++---- dlordinal/soft_labelling/poisson_distribution.py | 8 ++++---- .../soft_labelling/tests/test_poisson_distribution.py | 6 +++--- dlordinal/soft_labelling/triangular_distribution.py | 6 +++--- 9 files changed, 26 insertions(+), 26 deletions(-) diff --git a/dlordinal/losses/poisson_loss.py b/dlordinal/losses/poisson_loss.py index 5489f27..9df359e 100644 --- a/dlordinal/losses/poisson_loss.py +++ b/dlordinal/losses/poisson_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_poisson_probabilities +from ..soft_labelling import get_poisson_softlabels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -59,7 +59,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - cls_probs = torch.tensor(get_poisson_probabilities(num_classes)).float() + cls_probs = torch.tensor(get_poisson_softlabels(num_classes)).float() super().__init__( cls_probs=cls_probs, diff --git a/dlordinal/soft_labelling/__init__.py b/dlordinal/soft_labelling/__init__.py index 571772a..0752b35 100644 --- a/dlordinal/soft_labelling/__init__.py +++ b/dlordinal/soft_labelling/__init__.py @@ -5,14 +5,14 @@ get_general_triangular_params, get_general_triangular_softlabels, ) -from .poisson_distribution import get_poisson_probabilities +from .poisson_distribution import get_poisson_softlabels from .triangular_distribution import get_triangular_softlabels __all__ = [ "get_beta_softlabels", "get_exponential_softlabels", "get_binomial_softlabels", - "get_poisson_probabilities", + "get_poisson_softlabels", "get_triangular_softlabels", "get_general_triangular_params", "get_general_triangular_softlabels", diff --git a/dlordinal/soft_labelling/beta_distribution.py b/dlordinal/soft_labelling/beta_distribution.py index 3bb7694..ff980f3 100644 --- a/dlordinal/soft_labelling/beta_distribution.py +++ 
b/dlordinal/soft_labelling/beta_distribution.py @@ -58,7 +58,7 @@ def beta_dist(x, p, q, a=1.0): def get_beta_softlabels(J, p, q, a=1.0): - """Get probabilities from a beta distribution :math:`B(p,q,a)` for ``J`` splits. + """Get soft labels from a beta distribution :math:`B(p,q,a)` for ``J`` splits. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the distribution function in the upper limit of the interval and the value of the distribution @@ -90,7 +90,7 @@ class or split. Example ------- - >>> from dlordinal.distributions import get_beta_probabilities + >>> from dlordinal.soft_labelling import get_beta_probabilities >>> get_beta_probabilities(3, 2, 3) [0.4074074080000002, 0.48148148059259255, 0.11111111140740726] >>> get_beta_probabilities(5, 5, 1, a=2) diff --git a/dlordinal/soft_labelling/binomial_distribution.py b/dlordinal/soft_labelling/binomial_distribution.py index 893d4e1..4de06ce 100644 --- a/dlordinal/soft_labelling/binomial_distribution.py +++ b/dlordinal/soft_labelling/binomial_distribution.py @@ -3,7 +3,7 @@ def get_binomial_softlabels(J): - """Get probabilities for the binomial distribution for ``J`` classes or splits + """Get soft labels for the binomial distribution for ``J`` classes or splits using the approach described in :footcite:t:`liu2020unimodal`. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the binomial @@ -34,8 +34,8 @@ def get_binomial_softlabels(J): Example ------- - >>> from dlordinal.distributions import get_binominal_probabilities - >>> get_binominal_probabilities(5) + >>> from dlordinal.soft_labelling import get_binomial_softlabels + >>> get_binomial_softlabels(5) array([[6.561e-01, 2.916e-01, 4.860e-02, 3.600e-03, 1.000e-04], [2.401e-01, 4.116e-01, 2.646e-01, 7.560e-02, 8.100e-03], [6.250e-02, 2.500e-01, 3.750e-01, 2.500e-01, 6.250e-02], diff --git a/dlordinal/soft_labelling/exponential_distribution.py b/dlordinal/soft_labelling/exponential_distribution.py index c000431..5aa0127 100644 --- a/dlordinal/soft_labelling/exponential_distribution.py +++ b/dlordinal/soft_labelling/exponential_distribution.py @@ -3,7 +3,7 @@ def get_exponential_softlabels(J, p=1.0, tau=1.0): - """Get probabilities from exponential distribution for ``J`` classes or splits as + """Get soft labels from exponential distribution for ``J`` classes or splits as described in :footcite:t:`liu2020unimodal` and :footcite:t:`vargas2023exponential`. 
The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the exponential @@ -39,8 +39,8 @@ def get_exponential_softlabels(J, p=1.0, tau=1.0): Example ------- - >>> from dlordinal.distributions import get_exponential_probabilities - >>> get_exponential_probabilities(5) + >>> from dlordinal.soft_labelling import get_exponential_softlabels + >>> get_exponential_softlabels(5) array([[0.63640865, 0.23412166, 0.08612854, 0.03168492, 0.01165623], [0.19151597, 0.52059439, 0.19151597, 0.07045479, 0.02591887], [0.06745081, 0.1833503 , 0.49839779, 0.1833503 , 0.06745081], diff --git a/dlordinal/soft_labelling/general_triangular_distribution.py b/dlordinal/soft_labelling/general_triangular_distribution.py index b5acfec..77be854 100644 --- a/dlordinal/soft_labelling/general_triangular_distribution.py +++ b/dlordinal/soft_labelling/general_triangular_distribution.py @@ -47,7 +47,7 @@ def get_general_triangular_params(J: int, alphas: np.ndarray, verbose: int = 0): Example ------- - >>> from dlordinal.distributions import get_general_triangular_params + >>> from dlordinal.soft_labelling import get_general_triangular_params >>> get_general_triangular_params(5, [0, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0]) [{'alpha2j_1': 0, 'alpha2j': 0.05, 'a': 0, 'b': 0.25760143110525874, 'c': 0}, {'alpha2j_1': 0.05, 'alpha2j': 0.05, 'a': 0.153752470442574, 'b': 0.446247529557426, 'c': 0.3}, {'alpha2j_1': 0.05, 'alpha2j': 0.05, 'a': 0.353752470442574, 'b': 0.646247529557426, 'c': 0.5}, {'alpha2j_1': 0.05, 'alpha2j': 0.1, 'a': 0.550779686438060, 'b': 0.875486049708105, 'c': 0.7}, {'alpha2j_1': 0.0, 'alpha2j': 0, 'a': 0.8, 'b': 1, 'c': 1}] @@ -159,7 +159,7 @@ def abcj(J, j, alpha2j_1, alpha2j): def get_general_triangular_softlabels(J: int, alphas: np.ndarray, verbose: int = 0): - """Get probabilities from triangular distributions for ``J`` classes or splits. + """Get soft labels using triangular distributions for ``J`` classes or splits. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the triangular distribution function for the interval boundaries. The probability for the first @@ -193,8 +193,8 @@ def get_general_triangular_softlabels(J: int, alphas: np.ndarray, verbose: int = Example ------- - >>> from dlordinal.distributions import get_general_triangular_probabilities - >>> get_general_triangular_probabilities( + >>> from dlordinal.soft_labelling import get_general_triangular_softlabels + >>> get_general_triangular_softlabels( ... 5, ... [0, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0] ... ) diff --git a/dlordinal/soft_labelling/poisson_distribution.py b/dlordinal/soft_labelling/poisson_distribution.py index bc4f54e..295f1c7 100644 --- a/dlordinal/soft_labelling/poisson_distribution.py +++ b/dlordinal/soft_labelling/poisson_distribution.py @@ -3,8 +3,8 @@ from scipy.stats import poisson -def get_poisson_probabilities(J): - """Get probabilities from poisson distribution for ``J`` classes or splits using the +def get_poisson_softlabels(J): + """Get soft labels using poisson distributions for ``J`` classes or splits using the methodology described in :footcite:t:`liu2020unimodal`. 
The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the poisson @@ -36,8 +36,8 @@ def get_poisson_probabilities(J): Example ------- - >>> from dlordinal.distributions import get_poisson_probabilities - >>> get_poisson_probabilities(5) + >>> from dlordinal.soft_labelling import get_poisson_softlabels + >>> get_poisson_softlabels(5) array([[0.23414552, 0.23414552, 0.19480578, 0.17232403, 0.16457916], [0.18896888, 0.21635436, 0.21635436, 0.19768881, 0.18063359], [0.17822335, 0.19688341, 0.21214973, 0.21214973, 0.20059378], diff --git a/dlordinal/soft_labelling/tests/test_poisson_distribution.py b/dlordinal/soft_labelling/tests/test_poisson_distribution.py index c839e97..79eeb74 100644 --- a/dlordinal/soft_labelling/tests/test_poisson_distribution.py +++ b/dlordinal/soft_labelling/tests/test_poisson_distribution.py @@ -1,13 +1,13 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_poisson_probabilities +from dlordinal.soft_labelling import get_poisson_softlabels def test_get_poisson_probabilities(): # Case 1: n = 3 n = 3 - result = get_poisson_probabilities(n) + result = get_poisson_softlabels(n) # Verifies that the result is a matrix with n rows and n columns assert result.shape == (n, n) @@ -32,7 +32,7 @@ def test_get_poisson_probabilities(): # Case 2: n = 5 n = 5 - result = get_poisson_probabilities(n) + result = get_poisson_softlabels(n) print(result) # Verifies that the result is a matrix with n rows and n columns diff --git a/dlordinal/soft_labelling/triangular_distribution.py b/dlordinal/soft_labelling/triangular_distribution.py index 4deb6ef..bbe2c91 100644 --- a/dlordinal/soft_labelling/triangular_distribution.py +++ b/dlordinal/soft_labelling/triangular_distribution.py @@ -7,7 +7,7 @@ def get_triangular_softlabels(J: int, alpha2: float = 0.01, verbose: int = 0): """ - Get probabilities from triangular distributions for ``J`` classes or splits using + Get soft labels using triangular distributions for ``J`` classes or splits using the approach described in :footcite:t:`vargas2023softlabelling`. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the triangular @@ -87,8 +87,8 @@ def get_triangular_softlabels(J: int, alpha2: float = 0.01, verbose: int = 0): Example ------- - >>> from dlordinal.distributions import get_triangular_probabilities - >>> get_triangular_probabilities(5) + >>> from dlordinal.soft_labelling import get_triangular_softlabels + >>> get_triangular_softlabels(5) array([[0.98845494, 0.01154505, 0. , 0. , 0. ], [0.01 , 0.98 , 0.01 , 0. , 0. ], [0. , 0.01 , 0.98 , 0.01 , 0. 
], From 7a7b62d819b40e5cf4322cdd0ba3a2fa0de35928 Mon Sep 17 00:00:00 2001 From: tr1tu Date: Tue, 16 Apr 2024 09:33:59 +0200 Subject: [PATCH 5/5] changed softlabels for soft_labels --- dlordinal/losses/beta_loss.py | 4 ++-- dlordinal/losses/binomial_loss.py | 4 ++-- dlordinal/losses/exponential_loss.py | 4 ++-- dlordinal/losses/general_triangular_loss.py | 4 ++-- dlordinal/losses/poisson_loss.py | 4 ++-- dlordinal/losses/triangular_loss.py | 4 ++-- dlordinal/soft_labelling/__init__.py | 24 +++++++++---------- dlordinal/soft_labelling/beta_distribution.py | 8 +++---- .../soft_labelling/binomial_distribution.py | 6 ++--- .../exponential_distribution.py | 6 ++--- .../general_triangular_distribution.py | 6 ++--- .../soft_labelling/poisson_distribution.py | 6 ++--- .../tests/test_beta_distribution.py | 10 ++++---- .../tests/test_binomial_distribution.py | 6 ++--- .../tests/test_exponential_distribution.py | 8 +++---- .../test_general_triangular_distribution.py | 24 ++++++++++++++++++- .../tests/test_poisson_distribution.py | 8 +++---- .../tests/test_triangular_distribution.py | 12 +++++----- .../soft_labelling/triangular_distribution.py | 6 ++--- 19 files changed, 88 insertions(+), 66 deletions(-) diff --git a/dlordinal/losses/beta_loss.py b/dlordinal/losses/beta_loss.py index f0e5e2c..acd4125 100644 --- a/dlordinal/losses/beta_loss.py +++ b/dlordinal/losses/beta_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_beta_softlabels +from ..soft_labelling import get_beta_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss # Params [a,b] for beta distribution @@ -177,7 +177,7 @@ def __init__( # Precompute class probabilities for each label cls_probs = torch.tensor( [ - get_beta_softlabels( + get_beta_soft_labels( num_classes, self.params[num_classes][i][0], self.params[num_classes][i][1], diff --git a/dlordinal/losses/binomial_loss.py b/dlordinal/losses/binomial_loss.py index 8718051..4da22d2 100644 --- a/dlordinal/losses/binomial_loss.py +++ b/dlordinal/losses/binomial_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_binomial_softlabels +from ..soft_labelling import get_binomial_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -59,7 +59,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - cls_probs = torch.tensor(get_binomial_softlabels(num_classes)).float() + cls_probs = torch.tensor(get_binomial_soft_labels(num_classes)).float() super().__init__( cls_probs=cls_probs, diff --git a/dlordinal/losses/exponential_loss.py b/dlordinal/losses/exponential_loss.py index 7d87a24..475a4bb 100644 --- a/dlordinal/losses/exponential_loss.py +++ b/dlordinal/losses/exponential_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_exponential_softlabels +from ..soft_labelling import get_exponential_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -63,7 +63,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - cls_probs = torch.tensor(get_exponential_softlabels(num_classes, p)).float() + cls_probs = torch.tensor(get_exponential_soft_labels(num_classes, p)).float() super().__init__( cls_probs=cls_probs, diff --git a/dlordinal/losses/general_triangular_loss.py b/dlordinal/losses/general_triangular_loss.py index a2ba122..edb655a 100644 --- a/dlordinal/losses/general_triangular_loss.py +++ 
b/dlordinal/losses/general_triangular_loss.py @@ -4,7 +4,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_general_triangular_softlabels +from ..soft_labelling import get_general_triangular_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -63,7 +63,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - r = get_general_triangular_softlabels(num_classes, alphas, verbose=0) + r = get_general_triangular_soft_labels(num_classes, alphas, verbose=0) cls_probs = torch.tensor(r) super().__init__( diff --git a/dlordinal/losses/poisson_loss.py b/dlordinal/losses/poisson_loss.py index 9df359e..eb677c2 100644 --- a/dlordinal/losses/poisson_loss.py +++ b/dlordinal/losses/poisson_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_poisson_softlabels +from ..soft_labelling import get_poisson_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -59,7 +59,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - cls_probs = torch.tensor(get_poisson_softlabels(num_classes)).float() + cls_probs = torch.tensor(get_poisson_soft_labels(num_classes)).float() super().__init__( cls_probs=cls_probs, diff --git a/dlordinal/losses/triangular_loss.py b/dlordinal/losses/triangular_loss.py index fcf2022..85f8326 100644 --- a/dlordinal/losses/triangular_loss.py +++ b/dlordinal/losses/triangular_loss.py @@ -3,7 +3,7 @@ import torch from torch import Tensor -from ..soft_labelling import get_triangular_softlabels +from ..soft_labelling import get_triangular_soft_labels from .custom_targets_loss import CustomTargetsCrossEntropyLoss @@ -62,7 +62,7 @@ def __init__( label_smoothing: float = 0.0, ): # Precompute class probabilities for each label - cls_probs = torch.tensor(get_triangular_softlabels(num_classes, alpha2)) + cls_probs = torch.tensor(get_triangular_soft_labels(num_classes, alpha2)) super().__init__( cls_probs=cls_probs, eta=eta, diff --git a/dlordinal/soft_labelling/__init__.py b/dlordinal/soft_labelling/__init__.py index 0752b35..06542ee 100644 --- a/dlordinal/soft_labelling/__init__.py +++ b/dlordinal/soft_labelling/__init__.py @@ -1,19 +1,19 @@ -from .beta_distribution import get_beta_softlabels -from .binomial_distribution import get_binomial_softlabels -from .exponential_distribution import get_exponential_softlabels +from .beta_distribution import get_beta_soft_labels +from .binomial_distribution import get_binomial_soft_labels +from .exponential_distribution import get_exponential_soft_labels from .general_triangular_distribution import ( get_general_triangular_params, - get_general_triangular_softlabels, + get_general_triangular_soft_labels, ) -from .poisson_distribution import get_poisson_softlabels -from .triangular_distribution import get_triangular_softlabels +from .poisson_distribution import get_poisson_soft_labels +from .triangular_distribution import get_triangular_soft_labels __all__ = [ - "get_beta_softlabels", - "get_exponential_softlabels", - "get_binomial_softlabels", - "get_poisson_softlabels", - "get_triangular_softlabels", + "get_beta_soft_labels", + "get_exponential_soft_labels", + "get_binomial_soft_labels", + "get_poisson_soft_labels", + "get_triangular_soft_labels", "get_general_triangular_params", - "get_general_triangular_softlabels", + "get_general_triangular_soft_labels", ] diff --git a/dlordinal/soft_labelling/beta_distribution.py 
b/dlordinal/soft_labelling/beta_distribution.py index ff980f3..363a6ad 100644 --- a/dlordinal/soft_labelling/beta_distribution.py +++ b/dlordinal/soft_labelling/beta_distribution.py @@ -57,7 +57,7 @@ def beta_dist(x, p, q, a=1.0): return (x ** (a * p)) / (p * beta_func(p, q)) * hyp2f1(p, 1 - q, p + 1, x**a) -def get_beta_softlabels(J, p, q, a=1.0): +def get_beta_soft_labels(J, p, q, a=1.0): """Get soft labels from a beta distribution :math:`B(p,q,a)` for ``J`` splits. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the distribution @@ -90,10 +90,10 @@ class or split. Example ------- - >>> from dlordinal.soft_labelling import get_beta_probabilities - >>> get_beta_probabilities(3, 2, 3) + >>> from dlordinal.soft_labelling import get_beta_soft_labels + >>> get_beta_soft_labels(3, 2, 3) [0.4074074080000002, 0.48148148059259255, 0.11111111140740726] - >>> get_beta_probabilities(5, 5, 1, a=2) + >>> get_beta_soft_labels(5, 5, 1, a=2) [1.0240000307200007e-07, 0.00010475520052121611, 0.005941759979320316, 0.10132756401484902, 0.8926258084053067] """ diff --git a/dlordinal/soft_labelling/binomial_distribution.py b/dlordinal/soft_labelling/binomial_distribution.py index 4de06ce..f3b6971 100644 --- a/dlordinal/soft_labelling/binomial_distribution.py +++ b/dlordinal/soft_labelling/binomial_distribution.py @@ -2,7 +2,7 @@ from scipy.stats import binom -def get_binomial_softlabels(J): +def get_binomial_soft_labels(J): """Get soft labels for the binomial distribution for ``J`` classes or splits using the approach described in :footcite:t:`liu2020unimodal`. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for @@ -34,8 +34,8 @@ def get_binomial_softlabels(J): Example ------- - >>> from dlordinal.soft_labelling import get_binomial_softlabels - >>> get_binomial_softlabels(5) + >>> from dlordinal.soft_labelling import get_binomial_soft_labels + >>> get_binomial_soft_labels(5) array([[6.561e-01, 2.916e-01, 4.860e-02, 3.600e-03, 1.000e-04], [2.401e-01, 4.116e-01, 2.646e-01, 7.560e-02, 8.100e-03], [6.250e-02, 2.500e-01, 3.750e-01, 2.500e-01, 6.250e-02], diff --git a/dlordinal/soft_labelling/exponential_distribution.py b/dlordinal/soft_labelling/exponential_distribution.py index 5aa0127..6aaa73e 100644 --- a/dlordinal/soft_labelling/exponential_distribution.py +++ b/dlordinal/soft_labelling/exponential_distribution.py @@ -2,7 +2,7 @@ from scipy.special import softmax -def get_exponential_softlabels(J, p=1.0, tau=1.0): +def get_exponential_soft_labels(J, p=1.0, tau=1.0): """Get soft labels from exponential distribution for ``J`` classes or splits as described in :footcite:t:`liu2020unimodal` and :footcite:t:`vargas2023exponential`. 
The :math:`[0,1]` interval is split into ``J`` intervals and the probability for @@ -39,8 +39,8 @@ def get_exponential_softlabels(J, p=1.0, tau=1.0): Example ------- - >>> from dlordinal.soft_labelling import get_exponential_softlabels - >>> get_exponential_softlabels(5) + >>> from dlordinal.soft_labelling import get_exponential_soft_labels + >>> get_exponential_soft_labels(5) array([[0.63640865, 0.23412166, 0.08612854, 0.03168492, 0.01165623], [0.19151597, 0.52059439, 0.19151597, 0.07045479, 0.02591887], [0.06745081, 0.1833503 , 0.49839779, 0.1833503 , 0.06745081], diff --git a/dlordinal/soft_labelling/general_triangular_distribution.py b/dlordinal/soft_labelling/general_triangular_distribution.py index 77be854..38f83f3 100644 --- a/dlordinal/soft_labelling/general_triangular_distribution.py +++ b/dlordinal/soft_labelling/general_triangular_distribution.py @@ -158,7 +158,7 @@ def abcj(J, j, alpha2j_1, alpha2j): return params -def get_general_triangular_softlabels(J: int, alphas: np.ndarray, verbose: int = 0): +def get_general_triangular_soft_labels(J: int, alphas: np.ndarray, verbose: int = 0): """Get soft labels using triangular distributions for ``J`` classes or splits. The :math:`[0,1]` interval is split into ``J`` intervals and the probability for each interval is computed as the difference between the value of the triangular @@ -193,8 +193,8 @@ def get_general_triangular_softlabels(J: int, alphas: np.ndarray, verbose: int = Example ------- - >>> from dlordinal.soft_labelling import get_general_triangular_softlabels - >>> get_general_triangular_softlabels( + >>> from dlordinal.soft_labelling import get_general_triangular_soft_labels + >>> get_general_triangular_soft_labels( ... 5, ... [0, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0] ... ) diff --git a/dlordinal/soft_labelling/poisson_distribution.py b/dlordinal/soft_labelling/poisson_distribution.py index 295f1c7..01b0dad 100644 --- a/dlordinal/soft_labelling/poisson_distribution.py +++ b/dlordinal/soft_labelling/poisson_distribution.py @@ -3,7 +3,7 @@ from scipy.stats import poisson -def get_poisson_softlabels(J): +def get_poisson_soft_labels(J): """Get soft labels using poisson distributions for ``J`` classes or splits using the methodology described in :footcite:t:`liu2020unimodal`. 
The :math:`[0,1]` interval is split into ``J`` intervals and the probability for @@ -36,8 +36,8 @@ def get_poisson_softlabels(J): Example ------- - >>> from dlordinal.soft_labelling import get_poisson_softlabels - >>> get_poisson_softlabels(5) + >>> from dlordinal.soft_labelling import get_poisson_soft_labels + >>> get_poisson_soft_labels(5) array([[0.23414552, 0.23414552, 0.19480578, 0.17232403, 0.16457916], [0.18896888, 0.21635436, 0.21635436, 0.19768881, 0.18063359], [0.17822335, 0.19688341, 0.21214973, 0.21214973, 0.20059378], diff --git a/dlordinal/soft_labelling/tests/test_beta_distribution.py b/dlordinal/soft_labelling/tests/test_beta_distribution.py index d754c99..c147441 100644 --- a/dlordinal/soft_labelling/tests/test_beta_distribution.py +++ b/dlordinal/soft_labelling/tests/test_beta_distribution.py @@ -1,7 +1,7 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_beta_softlabels +from dlordinal.soft_labelling import get_beta_soft_labels from dlordinal.soft_labelling.beta_distribution import beta_dist, beta_func @@ -23,7 +23,7 @@ def test_beta_inc_negative_values(a, b): beta_func(a, b) -def test_beta_distribution(): +def test_beta_soft_label(): # Case 1: Valid input x = 0.5 p = 2.0 @@ -52,13 +52,13 @@ def test_beta_distribution_negative_x(x): beta_dist(x, 2.0, 3.0, 1.0) -def test_beta_probabilities(): +def test_beta_soft_labels(): # Case 1: Valid input n = 5 p = 2.0 q = 3.0 a = 1.0 - result = get_beta_softlabels(n, p, q, a) + result = get_beta_soft_labels(n, p, q, a) expected_result = [ 0.1808000009216, 0.34399999942400017, @@ -75,7 +75,7 @@ def test_beta_probabilities(): p = 1.5 q = 2.5 a = 2.0 - result = get_beta_softlabels(n, p, q, a) + result = get_beta_soft_labels(n, p, q, a) expected_result = [ 0.05010107325697135, 0.283232260076362, diff --git a/dlordinal/soft_labelling/tests/test_binomial_distribution.py b/dlordinal/soft_labelling/tests/test_binomial_distribution.py index a849b66..bdeb513 100644 --- a/dlordinal/soft_labelling/tests/test_binomial_distribution.py +++ b/dlordinal/soft_labelling/tests/test_binomial_distribution.py @@ -1,13 +1,13 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_binomial_softlabels +from dlordinal.soft_labelling import get_binomial_soft_labels -def test_get_binomial_probabilities(): +def test_get_binomial_soft_labels(): # Case 1: n = 5 n = 5 - result = get_binomial_softlabels(n) + result = get_binomial_soft_labels(n) expected_result = np.array( [ [6.561e-01, 2.916e-01, 4.860e-02, 3.600e-03, 1.000e-04], diff --git a/dlordinal/soft_labelling/tests/test_exponential_distribution.py b/dlordinal/soft_labelling/tests/test_exponential_distribution.py index c3f900a..3a0a8fb 100644 --- a/dlordinal/soft_labelling/tests/test_exponential_distribution.py +++ b/dlordinal/soft_labelling/tests/test_exponential_distribution.py @@ -1,14 +1,14 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_exponential_softlabels +from dlordinal.soft_labelling import get_exponential_soft_labels -def test_get_exponential_probabilities(): +def test_get_exponential_soft_labels(): n = 5 p = 1.0 tau = 1.0 - result = get_exponential_softlabels(n, p, tau) + result = get_exponential_soft_labels(n, p, tau) expected_result = np.array( [ [0.63640865, 0.23412166, 0.08612854, 0.03168492, 0.01165623], @@ -35,7 +35,7 @@ def test_exponential_probabilities(): n = 4 p = 2.0 tau = 1.0 - result = get_exponential_softlabels(n, p, tau) + result = get_exponential_soft_labels(n, p, tau) expected_result = np.array( [ 
[7.21334965e-01, 2.65364304e-01, 1.32117107e-02, 8.90198068e-05], diff --git a/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py index bf0ca61..80308be 100644 --- a/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py +++ b/dlordinal/soft_labelling/tests/test_general_triangular_distribution.py @@ -3,7 +3,10 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_general_triangular_params +from dlordinal.soft_labelling import ( + get_general_triangular_params, + get_general_triangular_soft_labels, +) def test_get_general_triangular_params(): @@ -115,3 +118,22 @@ def test_wrong_alpha_shape(): match=re.escape("alphas must be a numpy array of shape (2 * n,), but got (7,)"), ): get_general_triangular_params(n, alphas) + + +def test_general_triangular_soft_labels(): + n = 3 + alphas = np.array( + [0.05518804, 0.14000449, 0.0586412, 0.03018706, 0.15230179, 0.03493327] + ) + result = get_general_triangular_soft_labels(n, alphas) + expected_result = [ + [0.9413588, 0.0586412, 0.0], + [0.03018706, 0.81751114, 0.15230179], + [0.0, 0.03493327, 0.96506673], + ] + + assert len(result.shape) == 2 + assert result.shape[0] == n + assert result.shape[1] == n + + assert np.allclose(result, expected_result, rtol=1e-6) diff --git a/dlordinal/soft_labelling/tests/test_poisson_distribution.py b/dlordinal/soft_labelling/tests/test_poisson_distribution.py index 79eeb74..933269f 100644 --- a/dlordinal/soft_labelling/tests/test_poisson_distribution.py +++ b/dlordinal/soft_labelling/tests/test_poisson_distribution.py @@ -1,13 +1,13 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_poisson_softlabels +from dlordinal.soft_labelling import get_poisson_soft_labels -def test_get_poisson_probabilities(): +def test_get_poisson_soft_labels(): # Case 1: n = 3 n = 3 - result = get_poisson_softlabels(n) + result = get_poisson_soft_labels(n) # Verifies that the result is a matrix with n rows and n columns assert result.shape == (n, n) @@ -32,7 +32,7 @@ def test_get_poisson_probabilities(): # Case 2: n = 5 n = 5 - result = get_poisson_softlabels(n) + result = get_poisson_soft_labels(n) print(result) # Verifies that the result is a matrix with n rows and n columns diff --git a/dlordinal/soft_labelling/tests/test_triangular_distribution.py b/dlordinal/soft_labelling/tests/test_triangular_distribution.py index e3a52a5..d653cec 100644 --- a/dlordinal/soft_labelling/tests/test_triangular_distribution.py +++ b/dlordinal/soft_labelling/tests/test_triangular_distribution.py @@ -1,16 +1,16 @@ import numpy as np import pytest -from dlordinal.soft_labelling import get_triangular_softlabels +from dlordinal.soft_labelling import get_triangular_soft_labels -def test_get_triangular_probabilities(): +def test_get_triangular_soft_labels(): # Case 1 n = 5 alpha2 = 0.01 verbose = 0 - result = get_triangular_softlabels(n, alpha2, verbose) + result = get_triangular_soft_labels(n, alpha2, verbose) expected_result = [ [0.98845494, 0.01154505, 0.0, 0.0, 0.0], @@ -39,7 +39,7 @@ def test_get_triangular_probabilities(): alpha2 = 0.01 verbose = 0 - result = get_triangular_softlabels(n, alpha2, verbose) + result = get_triangular_soft_labels(n, alpha2, verbose) expected_result = [ [0.98845494, 0.01154505, 0.0, 0.0, 0.0, 0.0, 0.0], @@ -66,13 +66,13 @@ def test_get_triangular_probabilities(): np.testing.assert_allclose(result, expected_result, rtol=1e-6) -def 
test_get_triangular_probabilities_verbose(): +def test_get_triangular_soft_labels_verbose(): # Case 1 n = 4 alpha2 = 0.01 verbose = 4 - result = get_triangular_softlabels(n, alpha2, verbose) + result = get_triangular_soft_labels(n, alpha2, verbose) expected_result = [ [ diff --git a/dlordinal/soft_labelling/triangular_distribution.py b/dlordinal/soft_labelling/triangular_distribution.py index bbe2c91..d78a45b 100644 --- a/dlordinal/soft_labelling/triangular_distribution.py +++ b/dlordinal/soft_labelling/triangular_distribution.py @@ -5,7 +5,7 @@ from .utils import get_intervals, triangular_cdf -def get_triangular_softlabels(J: int, alpha2: float = 0.01, verbose: int = 0): +def get_triangular_soft_labels(J: int, alpha2: float = 0.01, verbose: int = 0): """ Get soft labels using triangular distributions for ``J`` classes or splits using the approach described in :footcite:t:`vargas2023softlabelling`. @@ -87,8 +87,8 @@ def get_triangular_softlabels(J: int, alpha2: float = 0.01, verbose: int = 0): Example ------- - >>> from dlordinal.soft_labelling import get_triangular_softlabels - >>> get_triangular_softlabels(5) + >>> from dlordinal.soft_labelling import get_triangular_soft_labels + >>> get_triangular_soft_labels(5) array([[0.98845494, 0.01154505, 0. , 0. , 0. ], [0.01 , 0.98 , 0.01 , 0. , 0. ], [0. , 0.01 , 0.98 , 0.01 , 0. ],
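
After this series is applied, the soft-labelling helpers live in dlordinal.soft_labelling and follow the get_*_soft_labels naming. Below is a minimal usage sketch, assuming an installation of dlordinal that includes these patches; the function names, the alpha values, and the expected (J, J) shapes are taken from the docstrings and tests in the hunks above, not from re-run output.

import numpy as np
import torch

from dlordinal.soft_labelling import (
    get_binomial_soft_labels,
    get_general_triangular_soft_labels,
    get_poisson_soft_labels,
)

J = 5  # number of ordinal classes

# Each helper returns a (J, J) matrix: row j holds the soft target
# distribution assigned to samples of class j.
binomial_targets = get_binomial_soft_labels(J)
poisson_targets = get_poisson_soft_labels(J)

# The general triangular variant takes 2 * J alpha parameters
# (values taken from the docstring example above).
alphas = np.array([0, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0])
triangular_targets = get_general_triangular_soft_labels(J, alphas)

# The loss classes precompute these targets as float tensors, mirroring
# the dlordinal/losses hunks above.
cls_probs = torch.tensor(poisson_targets).float()

print(binomial_targets.shape, triangular_targets.shape, cls_probs.shape)
# Expected: (5, 5) (5, 5) torch.Size([5, 5])

The loss classes under dlordinal/losses consume the same helpers internally, converting the returned matrices to float tensors before passing them to CustomTargetsCrossEntropyLoss, as the hunks above show.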