From f12694f13427a62e3477cd02a9a379961f953439 Mon Sep 17 00:00:00 2001
From: neworderofjamie
Date: Mon, 8 Nov 2021 10:45:34 +0000
Subject: [PATCH] Revert "made some stuff more protected"

This reverts commit ad9846ad58da2bd026b7dd1ac0343889c3cd36bc.
---
 .../genn/genn/code_generator/groupMerged.h    |  8 ++-
 .../genn/code_generator/initGroupMerged.h     | 72 ++++++++++---------
 .../code_generator/neuronUpdateGroupMerged.h  | 36 +++++-----
 .../genn/code_generator/initGroupMerged.cc    | 56 +++++++--------
 .../code_generator/neuronUpdateGroupMerged.cc | 56 +++++++--------
 5 files changed, 120 insertions(+), 108 deletions(-)

diff --git a/include/genn/genn/code_generator/groupMerged.h b/include/genn/genn/code_generator/groupMerged.h
index 7db0a0c5b2..6a07f1d8f5 100644
--- a/include/genn/genn/code_generator/groupMerged.h
+++ b/include/genn/genn/code_generator/groupMerged.h
@@ -567,9 +567,9 @@ class GENN_EXPORT NeuronPrevSpikeTimeUpdateGroupMerged : public GroupMerged
 {
-protected:
+public:
     //------------------------------------------------------------------------
-    // Protected methods
+    // Public API
     //------------------------------------------------------------------------
     //! Should the parameter be implemented heterogeneously?
     bool isParamHeterogeneous(size_t index) const;
@@ -616,6 +616,10 @@ class GENN_EXPORT NeuronGroupMergedBase : public GroupMerged
     const std::vector<CurrentSourceInternal*> &getSortedArchetypeCurrentSources() const { return m_SortedCurrentSources.front(); }
+protected:
+    //------------------------------------------------------------------------
+    // Protected methods
+    //------------------------------------------------------------------------
     NeuronGroupMergedBase(size_t index, const std::string &precision, const std::string &timePrecision, const BackendBase &backend,
                           bool init, const std::vector<std::reference_wrapper<const NeuronGroupInternal>> &groups);
diff --git a/include/genn/genn/code_generator/initGroupMerged.h b/include/genn/genn/code_generator/initGroupMerged.h
index 3008d9f950..ba21046ca9 100644
--- a/include/genn/genn/code_generator/initGroupMerged.h
+++ b/include/genn/genn/code_generator/initGroupMerged.h
@@ -17,6 +17,24 @@ class GENN_EXPORT NeuronInitGroupMerged : public NeuronGroupMergedBase
     //----------------------------------------------------------------------------
     // Public API
     //----------------------------------------------------------------------------
+    //! Should the incoming synapse weight update model var init parameter be implemented heterogeneously?
+    bool isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
+
+    //! Should the incoming synapse weight update model var init derived parameter be implemented heterogeneously?
+    bool isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
+
+    //! Should the outgoing synapse weight update model var init parameter be implemented heterogeneously?
+    bool isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
+
+    //! Should the outgoing synapse weight update model var init derived parameter be implemented heterogeneously?
+    bool isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
+
+    //! Get sorted vectors of incoming synapse groups with postsynaptic variables belonging to archetype group
+    const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostVars() const { return m_SortedInSynWithPostVars.front(); }
+
+    //! Get sorted vectors of outgoing synapse groups with presynaptic variables belonging to archetype group
+    const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreVars() const { return m_SortedOutSynWithPreVars.front(); }
+
     //! Get hash digest used for detecting changes
     boost::uuids::detail::sha1::digest_type getHashDigest() const;
@@ -48,24 +66,6 @@ class GENN_EXPORT NeuronInitGroupMerged : public NeuronGroupMergedBase
                        bool(NeuronInitGroupMerged::*isDerivedParamHeterogeneousFn)(size_t, size_t, size_t) const,
                        const std::string&(SynapseGroupInternal::*getFusedVarSuffix)(void) const);
-    //! Should the incoming synapse weight update model var init parameter be implemented heterogeneously?
-    bool isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
-
-    //! Should the incoming synapse weight update model var init derived parameter be implemented heterogeneously?
-    bool isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
-
-    //! Should the outgoing synapse weight update model var init parameter be implemented heterogeneously?
-    bool isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
-
-    //! Should the outgoing synapse weight update model var init derived parameter be implemented heterogeneously?
-    bool isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;
-
-    //! Get sorted vectors of incoming synapse groups with postsynaptic variables belonging to archetype group
-    const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostVars() const { return m_SortedInSynWithPostVars.front(); }
-
-    //! Get sorted vectors of outgoing synapse groups with presynaptic variables belonging to archetype group
-    const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreVars() const { return m_SortedOutSynWithPreVars.front(); }
-
     //! Is the incoming synapse weight update model var init parameter referenced?
     bool isInSynWUMVarInitParamReferenced(size_t childIndex, size_t varIndex, size_t paramIndex) const;
@@ -206,6 +206,24 @@ class GENN_EXPORT SynapseConnectivityInitGroupMerged : public SynapseGroupMerged
 template<typename G>
 class CustomUpdateInitGroupMergedBase : public GroupMerged<G>
 {
+public:
+    //----------------------------------------------------------------------------
+    // Public API
+    //----------------------------------------------------------------------------
+    //! Should the var init parameter be implemented heterogeneously?
+    bool isVarInitParamHeterogeneous(size_t varIndex, size_t paramIndex) const
+    {
+        return (isVarInitParamReferenced(varIndex, paramIndex) &&
+                this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getParams(); }));
+    }
+
+    //! Should the var init derived parameter be implemented heterogeneously?
+    bool isVarInitDerivedParamHeterogeneous(size_t varIndex, size_t paramIndex) const
+    {
+        return (isVarInitDerivedParamReferenced(varIndex, paramIndex) &&
+                this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getDerivedParams(); }));
+    }
+
 protected:
     CustomUpdateInitGroupMergedBase(size_t index, const std::string &precision, const BackendBase &backend,
                                     const std::vector<std::reference_wrapper<const G>> &groups)
@@ -252,6 +270,10 @@ class CustomUpdateInitGroupMergedBase : public GroupMerged
                               &G::getVarInitialisers, &CustomUpdateInitGroupMergedBase::isVarInitDerivedParamHeterogeneous, hash);
     }
+private:
+    //----------------------------------------------------------------------------
+    // Private methods
+    //----------------------------------------------------------------------------
     //! Is the var init parameter referenced?
     bool isVarInitParamReferenced(size_t varIndex, size_t paramIndex) const
     {
@@ -269,20 +291,6 @@ class CustomUpdateInitGroupMergedBase : public GroupMerged
         const std::string derivedParamName = varInitSnippet->getDerivedParams().at(paramIndex).name;
         return this->isParamReferenced({varInitSnippet->getCode()}, derivedParamName);
     }
-
-    //! Should the var init parameter be implemented heterogeneously?
-    bool isVarInitParamHeterogeneous(size_t varIndex, size_t paramIndex) const
-    {
-        return (isVarInitParamReferenced(varIndex, paramIndex) &&
-                this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getParams(); }));
-    }
-
-    //! Should the var init derived parameter be implemented heterogeneously?
-    bool isVarInitDerivedParamHeterogeneous(size_t varIndex, size_t paramIndex) const
-    {
-        return (isVarInitDerivedParamReferenced(varIndex, paramIndex) &&
-                this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getDerivedParams(); }));
-    }
 };
 // ----------------------------------------------------------------------------
diff --git a/include/genn/genn/code_generator/neuronUpdateGroupMerged.h b/include/genn/genn/code_generator/neuronUpdateGroupMerged.h
index 82dfd4ffd8..09d5259a73 100644
--- a/include/genn/genn/code_generator/neuronUpdateGroupMerged.h
+++ b/include/genn/genn/code_generator/neuronUpdateGroupMerged.h
@@ -17,6 +17,24 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
     //------------------------------------------------------------------------
     // Public API
     //------------------------------------------------------------------------
+    //! Should the incoming synapse weight update model parameter be implemented heterogeneously?
+    bool isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
+
+    //! Should the incoming synapse weight update model derived parameter be implemented heterogeneously?
+    bool isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
+
+    //! Should the outgoing synapse weight update model parameter be implemented heterogeneously?
+    bool isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
+
+    //! Should the outgoing synapse weight update model derived parameter be implemented heterogeneously?
+    bool isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
+
+    //! Get sorted vectors of incoming synapse groups with postsynaptic code belonging to archetype group
+    const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostCode() const { return m_SortedInSynWithPostCode.front(); }
+
+    //! Get sorted vectors of outgoing synapse groups with presynaptic code belonging to archetype group
+    const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreCode() const { return m_SortedOutSynWithPreCode.front(); }
+
     //! Get hash digest used for detecting changes
     boost::uuids::detail::sha1::digest_type getHashDigest() const;
@@ -58,18 +76,6 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
                        bool(NeuronUpdateGroupMerged::*isDerivedParamHeterogeneous)(size_t, size_t) const,
                        const std::string&(SynapseGroupInternal::*getFusedVarSuffix)(void) const);
-    //! Should the incoming synapse weight update model parameter be implemented heterogeneously?
-    bool isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
-
-    //! Should the incoming synapse weight update model derived parameter be implemented heterogeneously?
-    bool isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
-
-    //! Should the outgoing synapse weight update model parameter be implemented heterogeneously?
-    bool isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
-
-    //! Should the outgoing synapse weight update model derived parameter be implemented heterogeneously?
-    bool isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;
-
     //! Is the incoming synapse weight update model parameter referenced?
     bool isInSynWUMParamReferenced(size_t childIndex, size_t paramIndex) const;
@@ -94,12 +100,6 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
                               bool(NeuronUpdateGroupMerged::*isParamHeterogeneous)(size_t, size_t) const,
                               bool(NeuronUpdateGroupMerged::*isDerivedParamHeterogeneous)(size_t, size_t) const) const;
-    //! Get sorted vectors of incoming synapse groups with postsynaptic code belonging to archetype group
-    const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostCode() const { return m_SortedInSynWithPostCode.front(); }
-
-    //! Get sorted vectors of outgoing synapse groups with presynaptic code belonging to archetype group
-    const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreCode() const { return m_SortedOutSynWithPreCode.front(); }
-
     //------------------------------------------------------------------------
     // Members
     //------------------------------------------------------------------------
diff --git a/src/genn/genn/code_generator/initGroupMerged.cc b/src/genn/genn/code_generator/initGroupMerged.cc
index 6049b6bda4..67935604ea 100644
--- a/src/genn/genn/code_generator/initGroupMerged.cc
+++ b/src/genn/genn/code_generator/initGroupMerged.cc
@@ -188,6 +188,34 @@ NeuronInitGroupMerged::NeuronInitGroupMerged(size_t index, const std::string &pr
                              &SynapseGroupInternal::getFusedWUPreVarSuffix);
 }
 //----------------------------------------------------------------------------
+bool NeuronInitGroupMerged::isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
+{
+    return (isInSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
+                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronInitGroupMerged::isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
+{
+    return (isInSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
+                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getDerivedParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronInitGroupMerged::isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
+{
+    return (isOutSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
+                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronInitGroupMerged::isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
+{
+    return (isOutSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
+                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getDerivedParams(); }));
+}
+//----------------------------------------------------------------------------
 boost::uuids::detail::sha1::digest_type NeuronInitGroupMerged::getHashDigest() const
 {
     boost::uuids::detail::sha1 hash;
@@ -412,34 +440,6 @@ void NeuronInitGroupMerged::generateWUVar(const BackendBase &backend,
     }
 }
 //----------------------------------------------------------------------------
-bool NeuronInitGroupMerged::isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
-{
-    return (isInSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
-                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronInitGroupMerged::isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
-{
-    return (isInSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
-                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getDerivedParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronInitGroupMerged::isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
-{
-    return (isOutSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
-                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronInitGroupMerged::isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
-{
-    return (isOutSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
-                                           [varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getDerivedParams(); }));
-}
-//----------------------------------------------------------------------------
 bool NeuronInitGroupMerged::isInSynWUMVarInitParamReferenced(size_t childIndex, size_t varIndex, size_t paramIndex) const
 {
     const auto *varInitSnippet = getSortedArchetypeInSynWithPostVars().at(childIndex)->getWUPostVarInitialisers().at(varIndex).getSnippet();
diff --git a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
index 3517e90499..74f88a2fae 100644
--- a/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
+++ b/src/genn/genn/code_generator/neuronUpdateGroupMerged.cc
@@ -113,6 +113,34 @@ NeuronUpdateGroupMerged::NeuronUpdateGroupMerged(size_t index, const std::string
 }
 //----------------------------------------------------------------------------
+bool NeuronUpdateGroupMerged::isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const
+{
+    return (isInSynWUMParamReferenced(childIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostCode,
+                                           [](const SynapseGroupInternal *s) { return s->getWUParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronUpdateGroupMerged::isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const
+{
+    return (isInSynWUMDerivedParamReferenced(childIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostCode,
+                                           [](const SynapseGroupInternal *s) { return s->getWUDerivedParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronUpdateGroupMerged::isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const
+{
+    return (isOutSynWUMParamReferenced(childIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreCode,
+                                           [](const SynapseGroupInternal *s) { return s->getWUParams(); }));
+}
+//----------------------------------------------------------------------------
+bool NeuronUpdateGroupMerged::isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const
+{
+    return (isOutSynWUMDerivedParamReferenced(childIndex, paramIndex) &&
+            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreCode,
+                                           [](const SynapseGroupInternal *s) { return s->getWUDerivedParams(); }));
+}
+//----------------------------------------------------------------------------
 boost::uuids::detail::sha1::digest_type NeuronUpdateGroupMerged::getHashDigest() const
 {
     boost::uuids::detail::sha1 hash;
@@ -618,34 +646,6 @@ void NeuronUpdateGroupMerged::generateWUVarUpdate(const BackendBase&, CodeStream
                        &NeuronUpdateGroupMerged::isInSynWUMParamHeterogeneous, &NeuronUpdateGroupMerged::isInSynWUMDerivedParamHeterogeneous);
 }
-//----------------------------------------------------------------------------
-bool NeuronUpdateGroupMerged::isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const
-{
-    return (isInSynWUMParamReferenced(childIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostCode,
-                                           [](const SynapseGroupInternal *s) { return s->getWUParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronUpdateGroupMerged::isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const
-{
-    return (isInSynWUMDerivedParamReferenced(childIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostCode,
-                                           [](const SynapseGroupInternal *s) { return s->getWUDerivedParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronUpdateGroupMerged::isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const
-{
-    return (isOutSynWUMParamReferenced(childIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreCode,
-                                           [](const SynapseGroupInternal *s) { return s->getWUParams(); }));
-}
-//----------------------------------------------------------------------------
-bool NeuronUpdateGroupMerged::isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const
-{
-    return (isOutSynWUMDerivedParamReferenced(childIndex, paramIndex) &&
-            isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreCode,
-                                           [](const SynapseGroupInternal *s) { return s->getWUDerivedParams(); }));
-}
 //--------------------------------------------------------------------------
 std::string NeuronUpdateGroupMerged::getVarIndex(unsigned int batchSize, VarAccessDuplication varDuplication, const std::string &index)
 {