diff --git a/GPy_ABCD/KernelExpressions/commutatives.py b/GPy_ABCD/KernelExpressions/commutatives.py
index f015889..a647467 100644
--- a/GPy_ABCD/KernelExpressions/commutatives.py
+++ b/GPy_ABCD/KernelExpressions/commutatives.py
@@ -5,6 +5,7 @@
 from GPy_ABCD.KernelExpressions.commutative_base import KernelExpression, SumOrProductKE
 from GPy_ABCD.KernelExpansion.kernelOperations import *
 from GPy_ABCD.KernelExpansion.kernelInterpretation import *
+import GPy_ABCD.config as config
 
 
 class SumKE(SumOrProductKE):
@@ -15,6 +16,10 @@ def simplify_base_terms(self):
         # WN and C are addition-idempotent
         if self.base_terms['WN'] > 1: self.base_terms['WN'] = 1
         if self.base_terms['C'] > 1: self.base_terms['C'] = 1
+
+        # If an offset-including LIN is used, remove any C in their presence (going through __dict__ because the class name gets prepended otherwise)
+        if config.__dict__['__USE_LIN_KERNEL_HORIZONTAL_OFFSET'] and self.base_terms['LIN'] > 0: self.base_terms['C'] = 0
+
         self.base_terms = + self.base_terms
         return self
 
diff --git a/GPy_ABCD/Kernels/baseKernels.py b/GPy_ABCD/Kernels/baseKernels.py
index 7b4a67f..f8d79a2 100644
--- a/GPy_ABCD/Kernels/baseKernels.py
+++ b/GPy_ABCD/Kernels/baseKernels.py
@@ -31,8 +31,8 @@ def C(): return _Gk.Bias(1)
 
 # def LIN(): return _Gk.Linear(1) # Not the same as ABCD's; missing horizontal offset
 def LIN(): return _Lk.Linear(1) # Not the same as ABCD's; missing horizontal offset
-if __USE_LIN_KERNEL_HORIZONTAL_OFFSET: # The version in ABCD; not sure if a good idea; the horizontal offset is the same as a vertical one, which is just kC
-    def LIN(): return _LOk.LinearWithOffset(1)
+if __USE_LIN_KERNEL_HORIZONTAL_OFFSET: # This flag also enables LIN + C -> LIN simplification
+    def LIN(): return _LOk.LinearWithOffset(1) # The version in ABCD; the horizontal offset is the same as a vertical one, which is just kC
 
 def SE(): return _Gk.RBF(1)
 
diff --git a/GPy_ABCD/Util/modelUtil.py b/GPy_ABCD/Util/modelUtil.py
index 1df490e..d7b81d7 100644
--- a/GPy_ABCD/Util/modelUtil.py
+++ b/GPy_ABCD/Util/modelUtil.py
@@ -22,12 +22,15 @@ def LA_LOO(m, ll, n, k): return np.mean(m.inference_method.LOO(m.kern, m.X, m.Y,
 GPy_optimisers = ['lbfgsb', 'org-bfgs', 'fmin_tnc', 'scg', 'simplex', 'adadelta', 'rprop', 'adam']
 
 
-def model_printout(m):
+def model_printout(m, plotly = False):
     print(m.kernel_expression)
     print(m.model.kern)
     print(f'Log-Lik: {m.model.log_likelihood()}')
     print(f'{m.cached_utility_function_type}: {m.cached_utility_function}')
-    m.plot()
+    if plotly:
+        m.change_plotting_library(library = 'plotly_offline')
+        m.plot()[0].show()
+    else: m.plot()
     print(m.interpret())
 
 
diff --git a/GPy_ABCD/__init__.py b/GPy_ABCD/__init__.py
index b7c3feb..fff6c1b 100644
--- a/GPy_ABCD/__init__.py
+++ b/GPy_ABCD/__init__.py
@@ -1,6 +1,6 @@
 """GPy-ABCD - Basic implementation with GPy of an Automatic Bayesian Covariance Discovery (ABCD) system"""
 
-__version__ = '1.2' # Change it in setup.py too
+__version__ = '1.2.1' # Change it in setup.py too
 __author__ = 'Thomas Fletcher '
 # __all__ = []
 
diff --git a/README.rst b/README.rst
index 57d334e..b3af015 100644
--- a/README.rst
+++ b/README.rst
@@ -75,11 +75,11 @@ A minimal example to showcase the various parameters follows:
 
     print('\n\nTop-3 models\' details:')
     for bm in best_mods[:3]:
-        model_printout(bm) # See the definition of this convenience function for examples of model details' extraction
+        model_printout(bm, plotly = False) # See the definition of this convenience function for examples of model details' extraction
         print('Prediction at X = 11:', bm.predict(np.array([11])[:, None]), '\n')
 
     from matplotlib import pyplot as plt
-    plt.show()
+    plt.show() # Not required for plotly = True above
 
 
 
@@ -158,8 +158,8 @@ Further Notes
 
 Generic:
 
-- Please let know me if you have successfully used this project in your own research
-- Please feel free to fork and expand this project (pull requests are welcome) since it is not the focus of my research; it was written just because I needed to use it in a broader adaptive statistical modelling context and therefore I have no need to expand its functionality in the near future
+- Please reach out if you have successfully used this project in your own research
+- Feel free to fork and expand this project (pull requests are welcome) since it is not the focus of my research; it was written just because I needed to use it in a broader adaptive statistical modelling context and therefore I have no need to expand its functionality in the near future
 
 Code-related:
 
diff --git a/Tests/checkModelSearchREADMEexample.py b/Tests/checkModelSearchREADMEexample.py
index 8692e71..5579c96 100644
--- a/Tests/checkModelSearchREADMEexample.py
+++ b/Tests/checkModelSearchREADMEexample.py
@@ -19,10 +19,10 @@
 
 print('\n\nTop-3 models\' details:')
 for bm in best_mods[:3]:
-    model_printout(bm) # See the definition of this convenience function for examples of model details' extraction
+    model_printout(bm, plotly = False) # See the definition of this convenience function for examples of model details' extraction
     print('Prediction at X = 11:', bm.predict(np.array([11])[:, None]), '\n')
 
 from matplotlib import pyplot as plt
-plt.show()
+plt.show() # Not required for plotly = True above
 
 
diff --git a/Tests/test_grammar.py b/Tests/test_grammar.py
index fa85f31..66d6d0b 100644
--- a/Tests/test_grammar.py
+++ b/Tests/test_grammar.py
@@ -86,3 +86,4 @@ def test_specific_expansions(self, es):
         for e in es: print(e)
         print(len(es))
 
+
diff --git a/setup.py b/setup.py
index 3a658c9..779f15b 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ def read_requirements():
 
 setup(
     name = 'GPy-ABCD',
-    version = '1.2', # Change it in __init__.py too
+    version = '1.2.1', # Change it in __init__.py too
     url = 'https://github.com/T-Flet/GPy-ABCD',
     license = 'BSD 3-Clause',
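
Usage note (not part of the patch): the sketch below shows how the new plotly flag of model_printout added above would be exercised end to end. It is a minimal, illustrative example assuming GPy_ABCD 1.2.1 with plotly available; the toy data and the reliance on explore_model_space's default search settings are assumptions for illustration, not prescribed by the patch.

    # Minimal sketch of the new plotly option (illustrative; assumes GPy_ABCD 1.2.1 and plotly are installed)
    import numpy as np
    from GPy_ABCD import *  # exposes explore_model_space and model_printout, as in the README example

    if __name__ == '__main__':  # guard kept in case model fitting runs in parallel
        # Toy data in the spirit of the README example
        X = np.linspace(-10, 10, 101)[:, None]
        Y = np.cos((X - 5) / 2)**2 * X * 2 + np.random.randn(101, 1)

        # Default search settings are assumed to be acceptable here; only the best models are kept
        best_mods, *rest = explore_model_space(X, Y)

        # plotly = True switches the model's plotting backend to 'plotly_offline' and calls .show()
        # on the returned figure, so the trailing matplotlib plt.show() is no longer needed;
        # plotly = False (the default) keeps the previous matplotlib behaviour
        model_printout(best_mods[0], plotly = True)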