diff --git a/tests/packages/pyeeg/entropy.py b/tests/packages/pyeeg/entropy.py
index ebd4467168..bfe7c8803c 100644
--- a/tests/packages/pyeeg/entropy.py
+++ b/tests/packages/pyeeg/entropy.py
@@ -1,6 +1,6 @@
 import numpy
 from .embedded_sequence import embed_seq
-
+from .spectrum import bin_power

 def ap_entropy(X, M, R):
     """Computer approximate entropy (ApEN) of series X, specified by M and R.
diff --git a/tests/packages/pyrem/univariate.py b/tests/packages/pyrem/univariate.py
index 7492f95ea5..d74f0f5d4a 100644
--- a/tests/packages/pyrem/univariate.py
+++ b/tests/packages/pyrem/univariate.py
@@ -427,9 +427,9 @@ def hfd(a, k_max):
 #     km_idxs[:,1] -= 1
 #

-    for k in xrange(1,k_max):
+    for k in range(1,k_max):
         Lk = 0
-        for m in xrange(0,k):
+        for m in range(0,k):
             #we pregenerate all idxs
             idxs = np.arange(1,int(np.floor((N-m)/k)),dtype=np.int32)
@@ -465,7 +465,7 @@ def dfa(X, Ave = None, L = None, sampling= 1):

     for i,n in enumerate(L):
         sampled = 0
-        for j in xrange(0,len(X) -n ,n):
+        for j in range(0,len(X) -n ,n):
             if np.random.rand() < sampling:
                 F[i] += np.polyfit(np.arange(j,j+n), Y[j:j+n],1, full=True)[1]
diff --git a/tests/tests_complexity.py b/tests/tests_complexity.py
index 36e116628b..eb41fb5d2f 100644
--- a/tests/tests_complexity.py
+++ b/tests/tests_complexity.py
@@ -28,8 +28,9 @@ def test_complexity():
 #    assert nk.entropy_approximate(signal, 2, 0.2) == pyeeg.ap_entropy(signal, 2, 0.2)

     # Sample
-    assert np.allclose(nk.entropy_sample(signal, order=2, r=0.2*np.std(signal)), nolds.sampen(signal, emb_dim=2, tolerance=0.2*np.std(signal)), atol=0.000001)
-#    assert nk.entropy_shannon(signal, 2, 0.2) == pyrem.samp_entropy(signal, 2, 0.2, relative_r=False)
+    assert np.allclose(nk.entropy_sample(signal, order=2, r=0.2*np.std(signal)),
+                       nolds.sampen(signal, emb_dim=2, tolerance=0.2*np.std(signal)), atol=0.000001)
+#    assert nk.entropy_sample(signal, 2, 0.2) == pyeeg.samp_entropy(signal, 2, 0.2)
 #    pyentrp.sample_entropy(signal, 2, 0.2)  # Gives something different

     # Fuzzy
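
Note on the tests_complexity.py hunk: the reformatted assertion is a cross-library consistency check, verifying that the package's sample entropy agrees with the independent nolds implementation to within a small absolute tolerance. Below is a minimal standalone sketch of that check. The two entropy calls are taken verbatim from the diff; the `neurokit2` import alias and the synthetic signal are assumptions, since the `signal` fixture and the imports are defined outside this hunk.

    import numpy as np
    import nolds
    import neurokit2 as nk  # assumption: the test imports its package under the alias `nk`

    # Assumed stand-in for the test's `signal` fixture: a noisy sine wave.
    rng = np.random.default_rng(42)
    signal = np.sin(np.linspace(0, 20 * np.pi, 1000)) + rng.normal(0, 0.1, 1000)

    # The tolerance r is conventionally a fraction of the signal's standard deviation.
    r = 0.2 * np.std(signal)

    # Same embedding dimension (2) and tolerance for both implementations,
    # exactly as in the updated assertion.
    ours = nk.entropy_sample(signal, order=2, r=r)
    reference = nolds.sampen(signal, emb_dim=2, tolerance=r)

    # Compare with a small absolute tolerance rather than exact equality:
    # the two implementations differ in internals and floating-point rounding.
    assert np.allclose(ours, reference, atol=1e-6)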