
Commit

A bug was fixed in complexity.py. One marker per measure is being created now.
omidvarnia committed Nov 25, 2022
1 parent cd77681 commit b2a11fa
Showing 3 changed files with 108 additions and 87 deletions.
47 changes: 28 additions & 19 deletions junifer/markers/complexity.py
@@ -6,6 +6,7 @@

from typing import TYPE_CHECKING, Any, Dict, List, Optional

# from ptpython.repl import embed
from ..api.decorators import register_marker
from ..utils import logger
from .base import BaseMarker
@@ -29,6 +30,19 @@ class Complexity(BaseMarker):
aggregation_method : str, optional
The method to perform aggregation using. Check valid options in
:func:`junifer.stats.get_aggfunc_by_name` (default "mean").
measure_type : dict
A dictionary including the name of the desired complexity measure
to be extracted and its associated parameters. The measures and
their default values include:
{"_range_entropy": {"m": 2, "tol": 0.5}}
{"_range_entropy_auc": {"m": 2, "n_r": 10}}
{"_perm_entropy": {"m": 4, "tau": 1}}
{"_weighted_perm_entropy": {"m": 4, "tau": 1}}
{"_sample_entropy": {"m": 2, "tau": 1, "tol": 0.5}}
{"_multiscale_entropy_auc": {"m": 2, "tol": 0.5, "scale": 10}}
{"_hurst_exponent": {"reserved": None}}
name : str, optional
The name of the marker. If None, will use the class name (default
None).
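For orientation, a minimal usage sketch of the single-measure interface documented above (the parcellation name is hypothetical and not taken from this commit; the measure dictionary mirrors the docstring defaults):

    from junifer.markers.complexity import Complexity

    # Compute range entropy with embedding dimension m=2 and tolerance tol=0.5.
    marker = Complexity(
        parcellation="Schaefer100x17",  # hypothetical parcellation name
        measure_type={"_range_entropy": {"m": 2, "tol": 0.5}},
        aggregation_method="mean",
    )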
@@ -38,26 +52,18 @@ class Complexity(BaseMarker):
def __init__(
self,
parcellation: str,
measure_types: dict = None,
measure_type: dict = None,
aggregation_method: str = "mean",
name: Optional[str] = None,
) -> None:
self.parcellation = parcellation
self.aggregation_method = aggregation_method
# measure_types should be a dictionary with keys as the function names,
# measure_type should be a dictionary with keys as the function names,
# and values as another dictionary with function parameters.
if measure_types is None:
self.measure_types = {
"_range_entropy": {"m": 2, "tol": 0.5},
"_range_entropy_auc": {"m": 2, "n_r": 10},
"_perm_entropy": {"m": 4, "tau": 1},
"_weighted_perm_entropy": {"m": 4, "tau": 1},
"_sample_entropy": {"m": 2, "tau": 1, "tol": 0.5},
"_multiscale_entropy_auc": {"m": 2, "tol": 0.5, "scale": 10},
"_hurst_exponent": {"reserved": None},
}
if measure_type is None:
self.measure_type = {"_range_entropy": {"m": 2, "tol": 0.5}}
else:
self.measure_types = measure_types
self.measure_type = measure_type

super().__init__(name=name)

@@ -142,13 +148,13 @@ def compute(
References
----------
.. [1] A. Omidvarnia et al.
.. [1] Omidvarnia, A., et al.
Range Entropy: A Bridge between Signal Complexity and
Self-Similarity, Entropy, vol. 20, no. 12, p. 962, 2018.
.. [2] Bandt, C., & Pompe, B.
Permutation entropy: a natural complexity measure for time
series. Physical review letters, 88(17), 174102, 2002
series. Physical review letters, 88(17), 174102, 2002.
.. [3] Fadlallah, B., Chen, B., Keil, A., & Principe, J.
Weighted-permutation entropy: A complexity measure for time
@@ -167,10 +173,13 @@ def compute(
.. [6] Peng, C.; Havlin, S.; Stanley, H.E.; Goldberger, A.L.
Quantification of scaling exponents and crossover phenomena in
nonstationary heartbeat time series.
Chaos Interdiscip. J. Nonlinear Sci., 5, 82–87, 1995
Chaos Interdiscip. J. Nonlinear Sci., 5, 82–87, 1995.
"""
# print('Stop: complexity_compute')
# embed(globals(), locals())

logger.debug("Calculating root sum of squares of edgewise timeseries.")
# Initialize a ParcelAggregation
parcel_aggregation = ParcelAggregation(
@@ -184,10 +193,10 @@

# Calculate complexity and set correct column/row labels
bold_ts = pa_dict["data"]
tmp = _calculate_complexity(bold_ts, self.measure_types)
tmp = _calculate_complexity(bold_ts, self.measure_type)
out = {}
out["data"] = tmp
out["col_names"] = self.measure_types.keys()
out["data"] = tmp.T
out["col_names"] = self.measure_type.keys()
out["row_names"] = pa_dict["columns"]

return out
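A hedged sketch of how the returned dictionary could be consumed downstream, assuming the marker instance from the sketch above and an input_dict prepared as in the test below; after this change the data is transposed so each requested complexity measure becomes one column:

    import pandas as pd

    out = marker.compute(input_dict)
    # One row per parcel, one column per requested measure.
    df = pd.DataFrame(
        out["data"],
        index=out["row_names"],
        columns=list(out["col_names"]),
    )
    print(df.shape)  # (n_parcels, n_measures), e.g. (100, 1) for a single measure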
20 changes: 10 additions & 10 deletions junifer/markers/tests/test_complexity.py
@@ -31,18 +31,18 @@ def test_compute() -> None:
# Create input data
input_dict = {"data": niimg, "path": out["BOLD"]["path"]}
# Select the complexity measure to test
measure_types = {
"_range_entropy": {"m": 2, "tol": 0.5},
"_range_entropy_auc": {"m": 2, "n_r": 10},
"_perm_entropy": {"m": 4, "tau": 1},
"_weighted_perm_entropy": {"m": 4, "tau": 1},
"_sample_entropy": {"m": 2, "tau": 1, "tol": 0.5},
"_multiscale_entropy_auc": {"m": 2, "tol": 0.5, "scale": 2},
"_hurst_exponent": {"reserved": None},
}
measure_type = {"_range_entropy": {"m": 2, "tol": 0.5}}
# measure_type = {"_range_entropy_auc": {"m": 2, "n_r": 10}}
# measure_type = {"_perm_entropy": {"m": 4, "tau": 1}}
# measure_type = {"_weighted_perm_entropy": {"m": 4, "tau": 1}}
# measure_type = {"_sample_entropy": {"m": 2, "tau": 1, "tol": 0.5}}
# measure_type = {"_multiscale_entropy_auc":
# {"m": 2, "tol": 0.5, "scale": 10}}
# measure_type = {"_hurst_exponent": {"reserved": None}}

# Compute the Complexity markers
complexity = Complexity(
parcellation=PARCELLATION, measure_types=measure_types
parcellation=PARCELLATION, measure_type=measure_type
)
new_out = complexity.compute(input_dict)

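The commented-out alternatives above cover one measure at a time; as a hedged illustration (not part of this commit), the same check could be parametrized over all measures listed in the docstring:

    import pytest

    ALL_MEASURES = [
        {"_range_entropy": {"m": 2, "tol": 0.5}},
        {"_range_entropy_auc": {"m": 2, "n_r": 10}},
        {"_perm_entropy": {"m": 4, "tau": 1}},
        {"_weighted_perm_entropy": {"m": 4, "tau": 1}},
        {"_sample_entropy": {"m": 2, "tau": 1, "tol": 0.5}},
        {"_multiscale_entropy_auc": {"m": 2, "tol": 0.5, "scale": 10}},
        {"_hurst_exponent": {"reserved": None}},
    ]

    @pytest.mark.parametrize("measure_type", ALL_MEASURES)
    def test_compute_all_measures(measure_type: dict) -> None:
        # input_dict would be built exactly as in test_compute above.
        complexity = Complexity(
            parcellation=PARCELLATION, measure_type=measure_type
        )
        new_out = complexity.compute(input_dict)
        assert list(new_out["col_names"]) == list(measure_type.keys())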
