Skip to content

Commit

Permalink
Fixed a bug in NormalizedRegressorNc.
Browse files Browse the repository at this point in the history
 NormalizedRegressorNc was sometimes misbehaving due to trying to calculate log(0). Fixed this by adding a small error term to each training example before taking their log error.
  • Loading branch information
donlnz committed May 6, 2015
1 parent cf0c9a4 commit 4f2f85d
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 2 deletions.
2 changes: 1 addition & 1 deletion nonconformist/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,6 @@

# Authors: Henrik Linusson

__version__ = '1.2.0'
__version__ = '1.2.1'

__all__ = ['icp', 'nc', 'acp']
6 changes: 5 additions & 1 deletion nonconformist/nc.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,9 @@ def __init__(self,

def fit(self, x, y):
	"""Fit the underlying model and the residual normalizer.

	First trains the underlying regression model via the superclass,
	then fits the normalization model on the log of the absolute
	training residuals.

	Parameters
	----------
	x : array-like
		Training examples.
	y : array-like
		Training targets.
	"""
	super(NormalizedRegressorNc, self).fit(x, y)
	err = np.abs(self._underlying_predict(x) - y)
	# Add a small constant so samples the underlying model fits exactly
	# (err == 0) do not produce log(0) = -inf in the normalizer targets.
	err += 0.00001
	log_err = np.log(err)
	self.normalizer.fit(x, log_err)

def calc_nc(self, x, y):
Expand All @@ -464,6 +466,8 @@ def calc_nc(self, x, y):
else:
self.beta_ = self.beta

print(norm)

return self.err_func(prediction, y, norm, self.beta_)

def predict(self, x, nc, significance=None):
Expand Down

0 comments on commit 4f2f85d

Please sign in to comment.