Skip to content

Commit

Permalink
Revert "made some stuff more protected"
Browse files Browse the repository at this point in the history
This reverts commit ad9846a.
  • Loading branch information
neworderofjamie committed Nov 8, 2021
1 parent 7763e7c commit f12694f
Show file tree
Hide file tree
Showing 5 changed files with 120 additions and 108 deletions.
8 changes: 6 additions & 2 deletions include/genn/genn/code_generator/groupMerged.h
Original file line number Diff line number Diff line change
Expand Up @@ -567,9 +567,9 @@ class GENN_EXPORT NeuronPrevSpikeTimeUpdateGroupMerged : public GroupMerged<Neur
//----------------------------------------------------------------------------
class GENN_EXPORT NeuronGroupMergedBase : public GroupMerged<NeuronGroupInternal>
{
protected:
public:
//------------------------------------------------------------------------
// Protected methods
// Public API
//------------------------------------------------------------------------
//! Should the parameter be implemented heterogeneously?
bool isParamHeterogeneous(size_t index) const;
Expand Down Expand Up @@ -616,6 +616,10 @@ class GENN_EXPORT NeuronGroupMergedBase : public GroupMerged<NeuronGroupInternal
//! Get sorted vectors of current sources belonging to archetype group
const std::vector<CurrentSourceInternal*> &getSortedArchetypeCurrentSources() const { return m_SortedCurrentSources.front(); }

protected:
//------------------------------------------------------------------------
// Protected methods
//------------------------------------------------------------------------
NeuronGroupMergedBase(size_t index, const std::string &precision, const std::string &timePrecision, const BackendBase &backend,
bool init, const std::vector<std::reference_wrapper<const NeuronGroupInternal>> &groups);

Expand Down
72 changes: 40 additions & 32 deletions include/genn/genn/code_generator/initGroupMerged.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,24 @@ class GENN_EXPORT NeuronInitGroupMerged : public NeuronGroupMergedBase
//----------------------------------------------------------------------------
// Public API
//----------------------------------------------------------------------------
//! Should the incoming synapse weight update model var init parameter be implemented heterogeneously?
bool isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the incoming synapse weight update model var init derived parameter be implemented heterogeneously?
bool isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model var init parameter be implemented heterogeneously?
bool isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model var init derived parameter be implemented heterogeneously?
bool isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Get sorted vectors of incoming synapse groups with postsynaptic variables belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostVars() const { return m_SortedInSynWithPostVars.front(); }

//! Get sorted vectors of outgoing synapse groups with presynaptic variables belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreVars() const { return m_SortedOutSynWithPreVars.front(); }

//! Get hash digest used for detecting changes
boost::uuids::detail::sha1::digest_type getHashDigest() const;

Expand Down Expand Up @@ -48,24 +66,6 @@ class GENN_EXPORT NeuronInitGroupMerged : public NeuronGroupMergedBase
bool(NeuronInitGroupMerged::*isDerivedParamHeterogeneousFn)(size_t, size_t, size_t) const,
const std::string&(SynapseGroupInternal::*getFusedVarSuffix)(void) const);

//! Should the incoming synapse weight update model var init parameter be implemented heterogeneously?
bool isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the incoming synapse weight update model var init derived parameter be implemented heterogeneously?
bool isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model var init parameter be implemented heterogeneously?
bool isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model var init derived parameter be implemented heterogeneously?
bool isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const;

//! Get sorted vectors of incoming synapse groups with postsynaptic variables belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostVars() const { return m_SortedInSynWithPostVars.front(); }

//! Get sorted vectors of outgoing synapse groups with presynaptic variables belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreVars() const { return m_SortedOutSynWithPreVars.front(); }

//! Is the incoming synapse weight update model var init parameter referenced?
bool isInSynWUMVarInitParamReferenced(size_t childIndex, size_t varIndex, size_t paramIndex) const;

Expand Down Expand Up @@ -206,6 +206,24 @@ class GENN_EXPORT SynapseConnectivityInitGroupMerged : public SynapseGroupMerged
template<typename G>
class CustomUpdateInitGroupMergedBase : public GroupMerged<G>
{
public:
//----------------------------------------------------------------------------
// Public API
//----------------------------------------------------------------------------
//! Should the var init parameter be implemented heterogeneously?
bool isVarInitParamHeterogeneous(size_t varIndex, size_t paramIndex) const
{
// A parameter only needs a heterogeneous (per-group) implementation when it is
// actually referenced in the var-init snippet code AND its value differs across
// the merged groups' initialisers for this variable.
// NOTE: `this->` is required because isParamValueHeterogeneous lives in the
// dependent base class GroupMerged<G>.
return (isVarInitParamReferenced(varIndex, paramIndex) &&
this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getParams(); }));
}

//! Should the var init derived parameter be implemented heterogeneously?
bool isVarInitDerivedParamHeterogeneous(size_t varIndex, size_t paramIndex) const
{
// Same logic as isVarInitParamHeterogeneous but for derived parameters:
// heterogeneous only if referenced in the snippet code and the derived-param
// values differ across the merged groups.
return (isVarInitDerivedParamReferenced(varIndex, paramIndex) &&
this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getDerivedParams(); }));
}

protected:
CustomUpdateInitGroupMergedBase(size_t index, const std::string &precision, const BackendBase &backend,
const std::vector<std::reference_wrapper<const G>> &groups)
Expand Down Expand Up @@ -252,6 +270,10 @@ class CustomUpdateInitGroupMergedBase : public GroupMerged<G>
&G::getVarInitialisers, &CustomUpdateInitGroupMergedBase<G>::isVarInitDerivedParamHeterogeneous, hash);
}

private:
//----------------------------------------------------------------------------
// Private methods
//----------------------------------------------------------------------------
//! Is the var init parameter referenced?
bool isVarInitParamReferenced(size_t varIndex, size_t paramIndex) const
{
Expand All @@ -269,20 +291,6 @@ class CustomUpdateInitGroupMergedBase : public GroupMerged<G>
const std::string derivedParamName = varInitSnippet->getDerivedParams().at(paramIndex).name;
return this->isParamReferenced({varInitSnippet->getCode()}, derivedParamName);
}

//! Should the var init parameter be implemented heterogeneously?
bool isVarInitParamHeterogeneous(size_t varIndex, size_t paramIndex) const
{
// Heterogeneous only when the parameter is referenced in the var-init snippet
// code and its value differs across the merged groups' initialisers.
// `this->` is needed: isParamValueHeterogeneous is in the dependent base GroupMerged<G>.
return (isVarInitParamReferenced(varIndex, paramIndex) &&
this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getParams(); }));
}

//! Should the var init derived parameter be implemented heterogeneously?
bool isVarInitDerivedParamHeterogeneous(size_t varIndex, size_t paramIndex) const
{
// As isVarInitParamHeterogeneous, but checks the derived-parameter values
// of each merged group's var initialiser.
return (isVarInitDerivedParamReferenced(varIndex, paramIndex) &&
this->isParamValueHeterogeneous(paramIndex, [varIndex](const G &cg) { return cg.getVarInitialisers().at(varIndex).getDerivedParams(); }));
}
};

// ----------------------------------------------------------------------------
Expand Down
36 changes: 18 additions & 18 deletions include/genn/genn/code_generator/neuronUpdateGroupMerged.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,24 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
//------------------------------------------------------------------------
// Public API
//------------------------------------------------------------------------
//! Should the incoming synapse weight update model parameter be implemented heterogeneously?
bool isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the incoming synapse weight update model derived parameter be implemented heterogeneously?
bool isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model parameter be implemented heterogeneously?
bool isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model derived parameter be implemented heterogeneously?
bool isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Get sorted vectors of incoming synapse groups with postsynaptic code belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostCode() const { return m_SortedInSynWithPostCode.front(); }

//! Get sorted vectors of outgoing synapse groups with presynaptic code belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreCode() const { return m_SortedOutSynWithPreCode.front(); }

//! Get hash digest used for detecting changes
boost::uuids::detail::sha1::digest_type getHashDigest() const;

Expand Down Expand Up @@ -58,18 +76,6 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
bool(NeuronUpdateGroupMerged::*isDerivedParamHeterogeneous)(size_t, size_t) const,
const std::string&(SynapseGroupInternal::*getFusedVarSuffix)(void) const);

//! Should the incoming synapse weight update model parameter be implemented heterogeneously?
bool isInSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the incoming synapse weight update model derived parameter be implemented heterogeneously?
bool isInSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model parameter be implemented heterogeneously?
bool isOutSynWUMParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Should the outgoing synapse weight update model derived parameter be implemented heterogeneously?
bool isOutSynWUMDerivedParamHeterogeneous(size_t childIndex, size_t paramIndex) const;

//! Is the incoming synapse weight update model parameter referenced?
bool isInSynWUMParamReferenced(size_t childIndex, size_t paramIndex) const;

Expand All @@ -94,12 +100,6 @@ class GENN_EXPORT NeuronUpdateGroupMerged : public NeuronGroupMergedBase
bool(NeuronUpdateGroupMerged::*isParamHeterogeneous)(size_t, size_t) const,
bool(NeuronUpdateGroupMerged::*isDerivedParamHeterogeneous)(size_t, size_t) const) const;

//! Get sorted vectors of incoming synapse groups with postsynaptic code belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeInSynWithPostCode() const { return m_SortedInSynWithPostCode.front(); }

//! Get sorted vectors of outgoing synapse groups with presynaptic code belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeOutSynWithPreCode() const { return m_SortedOutSynWithPreCode.front(); }

//------------------------------------------------------------------------
// Members
//------------------------------------------------------------------------
Expand Down
56 changes: 28 additions & 28 deletions src/genn/genn/code_generator/initGroupMerged.cc
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,34 @@ NeuronInitGroupMerged::NeuronInitGroupMerged(size_t index, const std::string &pr
&SynapseGroupInternal::getFusedWUPreVarSuffix);
}
//----------------------------------------------------------------------------
//! Heterogeneous only if the parameter is referenced in the var-init snippet code
//! (isInSynWUMVarInitParamReferenced) AND its value differs across the merged
//! groups' incoming synapse groups' postsynaptic WU var initialisers.
bool NeuronInitGroupMerged::isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isInSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getParams(); }));
}
//----------------------------------------------------------------------------
//! As the param variant, but compares the DERIVED parameter values of the
//! incoming synapse groups' postsynaptic WU var initialisers across merged groups.
bool NeuronInitGroupMerged::isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isInSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getDerivedParams(); }));
}
//----------------------------------------------------------------------------
//! Mirror of the InSyn version for OUTGOING synapse groups: heterogeneous only
//! if referenced in the snippet code and the presynaptic WU var-init parameter
//! values differ across the merged groups.
bool NeuronInitGroupMerged::isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isOutSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getParams(); }));
}
//----------------------------------------------------------------------------
//! As the param variant, but compares the DERIVED parameter values of the
//! outgoing synapse groups' presynaptic WU var initialisers across merged groups.
bool NeuronInitGroupMerged::isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isOutSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getDerivedParams(); }));
}
//----------------------------------------------------------------------------
boost::uuids::detail::sha1::digest_type NeuronInitGroupMerged::getHashDigest() const
{
boost::uuids::detail::sha1 hash;
Expand Down Expand Up @@ -412,34 +440,6 @@ void NeuronInitGroupMerged::generateWUVar(const BackendBase &backend,
}
}
//----------------------------------------------------------------------------
//! Heterogeneous only if the parameter is referenced in the var-init snippet code
//! AND its value differs across the merged groups' incoming synapse groups'
//! postsynaptic WU var initialisers.
bool NeuronInitGroupMerged::isInSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isInSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getParams(); }));
}
//----------------------------------------------------------------------------
//! As the param variant, but compares DERIVED parameter values of the incoming
//! synapse groups' postsynaptic WU var initialisers across merged groups.
bool NeuronInitGroupMerged::isInSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isInSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedInSynWithPostVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPostVarInitialisers().at(varIndex).getDerivedParams(); }));
}
//----------------------------------------------------------------------------
//! Mirror of the InSyn version for OUTGOING synapse groups, using the
//! presynaptic WU var initialisers of the sorted outgoing synapse groups.
bool NeuronInitGroupMerged::isOutSynWUMVarInitParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isOutSynWUMVarInitParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getParams(); }));
}
//----------------------------------------------------------------------------
//! As the param variant, but compares DERIVED parameter values of the outgoing
//! synapse groups' presynaptic WU var initialisers across merged groups.
bool NeuronInitGroupMerged::isOutSynWUMVarInitDerivedParamHeterogeneous(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
return (isOutSynWUMVarInitDerivedParamReferenced(childIndex, varIndex, paramIndex) &&
isChildParamValueHeterogeneous(childIndex, paramIndex, m_SortedOutSynWithPreVars,
[varIndex](const SynapseGroupInternal *s) { return s->getWUPreVarInitialisers().at(varIndex).getDerivedParams(); }));
}
//----------------------------------------------------------------------------
bool NeuronInitGroupMerged::isInSynWUMVarInitParamReferenced(size_t childIndex, size_t varIndex, size_t paramIndex) const
{
const auto *varInitSnippet = getSortedArchetypeInSynWithPostVars().at(childIndex)->getWUPostVarInitialisers().at(varIndex).getSnippet();
Expand Down
Loading

0 comments on commit f12694f

Please sign in to comment.