Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add to pre #479

Merged
merged 23 commits into from
Nov 18, 2021
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
94fa245
First attempts of following Jamie's recipe for adding $(addToPre,.) f…
tnowotny Nov 10, 2021
4f03d72
Added a first simple test and removed compilation errors that occurre…
tnowotny Nov 11, 2021
4a08d86
looked at a warning but decided not to change.
tnowotny Nov 11, 2021
4a8eec0
Added some source files that were missing in the commit.
tnowotny Nov 11, 2021
71e9d44
Added a few forgotten checks for whether pre-synaptic output was requ…
tnowotny Nov 11, 2021
071f784
Added missing default return value - duh!
tnowotny Nov 11, 2021
f358bcf
Corrected the test - must have been tired last night!
tnowotny Nov 12, 2021
a060ce8
Added another test - fmod turned out to be a nightmare unsuitable for…
tnowotny Nov 12, 2021
b4b3ba2
Removing invalid Windows bits.
tnowotny Nov 12, 2021
8296da7
Fixed test names and removed use of std namespace in genn code.
tnowotny Nov 12, 2021
61eb47f
Added two more feature tests for event code and learn_post code.
tnowotny Nov 12, 2021
6c3c0b1
Added missing initialisation for revInSynOutSyn* variables.
tnowotny Nov 16, 2021
f4d4445
Fixed a small issue where symlinks were left in /tmp if genn-buildmodel
tnowotny Nov 16, 2021
9d2edaf
replace duplicate makefiles with symlinks
neworderofjamie Nov 17, 2021
f02ca8d
removed unnecessary hash element - if this isn't the case, this …
neworderofjamie Nov 17, 2021
2f08260
new unit test for fusing preUpdates
neworderofjamie Nov 17, 2021
32fc4d5
fixed typo ``getFusedPreOuptputOutSyn`` to ``getFusedPreOutputOutSyn``
neworderofjamie Nov 17, 2021
4e4ee64
small hashing fixes
neworderofjamie Nov 17, 2021
0fb471e
unit test for merging related to pre-outputs
neworderofjamie Nov 17, 2021
267b0de
replaced tabs with spaces
neworderofjamie Nov 17, 2021
a26a0b1
Added the pygenn handles to access setPreTargetVar.
tnowotny Nov 17, 2021
e16360d
Merge branch 'addToPre' of github.com:genn-team/genn into addToPre
tnowotny Nov 17, 2021
9186f3e
windows tests
neworderofjamie Nov 18, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions include/genn/genn/code_generator/groupMerged.h
Original file line number Diff line number Diff line change
Expand Up @@ -613,6 +613,9 @@ class GENN_EXPORT NeuronGroupMergedBase : public GroupMerged<NeuronGroupInternal
//! Get sorted vectors of merged incoming synapse groups belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeMergedInSyns() const { return m_SortedMergedInSyns.front(); }

//! Get sorted vectors of merged outgoing synapse groups with presynaptic output belonging to archetype group
const std::vector<SynapseGroupInternal*> &getSortedArchetypeMergedPreOutputOutSyns() const { return m_SortedMergedPreOutputOutSyns.front(); }

//! Get sorted vectors of current sources belonging to archetype group
const std::vector<CurrentSourceInternal*> &getSortedArchetypeCurrentSources() const { return m_SortedCurrentSources.front(); }

Expand Down Expand Up @@ -914,12 +917,16 @@ class GENN_EXPORT NeuronGroupMergedBase : public GroupMerged<NeuronGroupInternal
void addMergedInSynPointerField(const std::string &type, const std::string &name,
size_t archetypeIndex, const std::string &prefix);

void addMergedPreOutputOutSynPointerField(const std::string &type, const std::string &name,
size_t archetypeIndex, const std::string &prefix);


private:
//------------------------------------------------------------------------
// Members
//------------------------------------------------------------------------
std::vector<std::vector<SynapseGroupInternal*>> m_SortedMergedInSyns;
std::vector<std::vector<SynapseGroupInternal*>> m_SortedMergedPreOutputOutSyns;
std::vector<std::vector<CurrentSourceInternal*>> m_SortedCurrentSources;
};

Expand Down Expand Up @@ -1077,6 +1084,11 @@ class GENN_EXPORT SynapseGroupMergedBase : public GroupMerged<SynapseGroupIntern
return ((batchSize == 1) ? "" : "postBatchOffset + ") + index;
}

//! Build the string expression used to index into the presynaptic input current (revInSyn) array
/*! When the model is not batched (batchSize == 1) the raw index is used directly;
    otherwise the per-batch presynaptic offset is prepended to the index expression. */
static std::string getPreISynIndex(unsigned int batchSize, const std::string &index)
{
    if(batchSize == 1) {
        return index;
    }
    return "preBatchOffset + " + index;
}

static std::string getSynVarIndex(unsigned int batchSize, VarAccessDuplication varDuplication, const std::string &index);

protected:
Expand Down Expand Up @@ -1108,6 +1120,7 @@ class GENN_EXPORT SynapseGroupMergedBase : public GroupMerged<SynapseGroupIntern
// Private methods
//------------------------------------------------------------------------
void addPSPointerField(const std::string &type, const std::string &name, const std::string &prefix);
void addPreOutputPointerField(const std::string &type, const std::string &name, const std::string &prefix);
void addSrcPointerField(const std::string &type, const std::string &name, const std::string &prefix);
void addTrgPointerField(const std::string &type, const std::string &name, const std::string &prefix);
void addWeightSharingPointerField(const std::string &type, const std::string &name, const std::string &prefix);
Expand Down
2 changes: 2 additions & 0 deletions include/genn/genn/neuronGroup.h
Original file line number Diff line number Diff line change
Expand Up @@ -237,6 +237,7 @@ class GENN_EXPORT NeuronGroup
const std::vector<SynapseGroupInternal*> &getOutSyn() const{ return m_OutSyn; }
const std::vector<SynapseGroupInternal *> &getFusedWUPreOutSyn() const { return m_FusedWUPreOutSyn; }

//! Gets pointers to the outgoing synapse groups providing fused presynaptic output for this neuron group
/*! NOTE(review): method name contains a typo ("Ouptput" should be "Output"); renaming it must be done
    together with all call sites (e.g. neuronGroupInternal.h, generateRunner.cc) in a single change */
const std::vector<SynapseGroupInternal *> &getFusedPreOuptputOutSyn() const { return m_FusedPreOutputOutSyn; }

//! Gets pointers to all current sources which provide input to this neuron group
const std::vector<CurrentSourceInternal*> &getCurrentSources() const { return m_CurrentSources; }
Expand Down Expand Up @@ -297,6 +298,7 @@ class GENN_EXPORT NeuronGroup
std::vector<SynapseGroupInternal*> m_FusedPSMInSyn;
std::vector<SynapseGroupInternal *> m_FusedWUPostInSyn;
std::vector<SynapseGroupInternal *> m_FusedWUPreOutSyn;
std::vector<SynapseGroupInternal *> m_FusedPreOutputOutSyn;
std::set<SpikeEventThreshold> m_SpikeEventCondition;
unsigned int m_NumDelaySlots;
std::vector<CurrentSourceInternal*> m_CurrentSources;
Expand Down
1 change: 1 addition & 0 deletions include/genn/genn/neuronGroupInternal.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ class NeuronGroupInternal : public NeuronGroup
using NeuronGroup::injectCurrent;
using NeuronGroup::getFusedPSMInSyn;
using NeuronGroup::getFusedWUPostInSyn;
using NeuronGroup::getFusedPreOuptputOutSyn;
using NeuronGroup::getFusedWUPreOutSyn;
using NeuronGroup::getOutSyn;
using NeuronGroup::getCurrentSources;
Expand Down
28 changes: 28 additions & 0 deletions include/genn/genn/synapseGroup.h
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,10 @@ class GENN_EXPORT SynapseGroup
/*! This should either be 'Isyn' or the name of one of the postsynaptic neuron's additional input variables. */
void setPSTargetVar(const std::string &varName);

//! Set name of neuron input variable $(addToPre, . ) commands will target
/*! This should either be 'Isyn' or the name of one of the presynaptic neuron's additional input variables. */
void setPreTargetVar(const std::string &varName);

//! Set location of sparse connectivity initialiser extra global parameter
/*! This is ignored for simulations on hardware with a single memory space
and only applies to extra global parameters which are pointers. */
Expand Down Expand Up @@ -221,6 +225,10 @@ class GENN_EXPORT SynapseGroup
//! Get name of neuron input variable postsynaptic model will target
/*! This will either be 'Isyn' or the name of one of the postsynaptic neuron's additional input variables. */
const std::string &getPSTargetVar() const{ return m_PSTargetVar; }

//! Get name of neuron input variable which a presynaptic output specified with $(addToPre) will target
/*! This will either be 'Isyn' or the name of one of the presynaptic neuron's additional input variables. */
const std::string &getPreTargetVar() const{ return m_PreTargetVar; }

//! Get location of sparse connectivity initialiser extra global parameter by name
/*! This is only used by extra global parameters which are pointers*/
Expand All @@ -233,6 +241,9 @@ class GENN_EXPORT SynapseGroup
//! Does this synapse group require dendritic delay?
bool isDendriticDelayRequired() const;

//! Does this synapse group define presynaptic output?
bool isPresynapticOutputRequired() const;

//! Does this synapse group require an RNG to generate procedural connectivity?
bool isProceduralConnectivityRNGRequired() const;

Expand Down Expand Up @@ -280,6 +291,7 @@ class GENN_EXPORT SynapseGroup
void setFusedPSVarSuffix(const std::string &suffix){ m_FusedPSVarSuffix = suffix; }
void setFusedWUPreVarSuffix(const std::string &suffix){ m_FusedWUPreVarSuffix = suffix; }
void setFusedWUPostVarSuffix(const std::string &suffix){ m_FusedWUPostVarSuffix = suffix; }
void setFusedPreOutputSuffix(const std::string &suffix){ m_FusedPreOutputSuffix = suffix; }

void initDerivedParams(double dt);

Expand All @@ -301,6 +313,7 @@ class GENN_EXPORT SynapseGroup
const std::string &getFusedPSVarSuffix() const{ return m_FusedPSVarSuffix; }
const std::string &getFusedWUPreVarSuffix() const { return m_FusedWUPreVarSuffix; }
const std::string &getFusedWUPostVarSuffix() const { return m_FusedWUPostVarSuffix; }
const std::string &getFusedPreOutputSuffix() const { return m_FusedPreOutputSuffix; }

//! Are any of this synapse group's weight update model variables referenced by a custom update
bool areWUVarReferencedByCustomUpdate() const { return m_WUVarReferencedByCustomUpdate; }
Expand All @@ -310,6 +323,9 @@ class GENN_EXPORT SynapseGroup

//! Can presynaptic update component of this synapse group's weight update model be safely fused with other whose hashes match so only one needs simulating at all?
bool canWUMPreUpdateBeFused() const;

//! Can presynaptic output component of this synapse group's weight update model be safely fused with other whose hashes match so only one needs simulating at all?
bool canPreOutputBeFused() const;

//! Can postsynaptic update component of this synapse group's weight update model be safely fused with other whose hashes match so only one needs simulating at all?
bool canWUMPostUpdateBeFused() const;
Expand Down Expand Up @@ -345,6 +361,10 @@ class GENN_EXPORT SynapseGroup
/*! NOTE: this can only be called after model is finalized */
boost::uuids::detail::sha1::digest_type getPSFuseHashDigest() const;

//! Generate hash of presynaptic output update component of this synapse group
/*! NOTE: this can only be called after model is finalized */
boost::uuids::detail::sha1::digest_type getPreOutputHashDigest() const;

boost::uuids::detail::sha1::digest_type getDendriticDelayUpdateHashDigest() const;

//! Generate hash of initialisation component of this synapse group
Expand Down Expand Up @@ -500,7 +520,15 @@ class GENN_EXPORT SynapseGroup
/*! This may not be the name of this synapse group if it has been fused */
std::string m_FusedWUPostVarSuffix;

//! Suffix for weight update model presynaptic output variable
/*! This may not be the name of this synapse group if it has been fused */
std::string m_FusedPreOutputSuffix;

//! Name of neuron input variable postsynaptic model will target
/*! This should either be 'Isyn' or the name of one of the postsynaptic neuron's additional input variables. */
std::string m_PSTargetVar;

//! Name of neuron input variable a presynaptic output specified with $(addToPre) will target
/*! This will either be 'Isyn' or the name of one of the presynaptic neuron's additional input variables. */
std::string m_PreTargetVar;
};
4 changes: 4 additions & 0 deletions include/genn/genn/synapseGroupInternal.h
Original file line number Diff line number Diff line change
Expand Up @@ -34,23 +34,27 @@ class SynapseGroupInternal : public SynapseGroup
using SynapseGroup::setEventThresholdReTestRequired;
using SynapseGroup::setWUVarReferencedByCustomUpdate;
using SynapseGroup::setFusedPSVarSuffix;
using SynapseGroup::setFusedPreOutputSuffix;
using SynapseGroup::setFusedWUPreVarSuffix;
using SynapseGroup::setFusedWUPostVarSuffix;
using SynapseGroup::initDerivedParams;
using SynapseGroup::isEventThresholdReTestRequired;
using SynapseGroup::areWUVarReferencedByCustomUpdate;
using SynapseGroup::getFusedPSVarSuffix;
using SynapseGroup::getFusedPreOutputSuffix;
using SynapseGroup::getFusedWUPreVarSuffix;
using SynapseGroup::getFusedWUPostVarSuffix;
using SynapseGroup::getSparseIndType;
using SynapseGroup::canPSBeFused;
using SynapseGroup::canWUMPreUpdateBeFused;
using SynapseGroup::canWUMPostUpdateBeFused;
using SynapseGroup::canPreOutputBeFused;
using SynapseGroup::getWUHashDigest;
using SynapseGroup::getWUPreHashDigest;
using SynapseGroup::getWUPostHashDigest;
using SynapseGroup::getPSHashDigest;
using SynapseGroup::getPSFuseHashDigest;
using SynapseGroup::getPreOutputHashDigest;
using SynapseGroup::getWUPreFuseHashDigest;
using SynapseGroup::getWUPostFuseHashDigest;
using SynapseGroup::getDendriticDelayUpdateHashDigest;
Expand Down
14 changes: 12 additions & 2 deletions src/genn/backends/single_threaded_cpu/backend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,9 @@ void Backend::genSynapseUpdate(CodeStream &os, const ModelSpecMerged &modelMerge
synSubs.addFuncSubstitution("addToInSyn", 1, "group->inSyn[" + s.getPostISynIndex(1, "j") + "] += $(0)");
}

if(s.getArchetype().isPresynapticOutputRequired()) {
synSubs.addFuncSubstitution("addToPre", 1, "group->revInSyn[" + s.getPreISynIndex(1, synSubs["id_pre"]) + "] += $(0)");
}
// Call synapse dynamics handler
s.generateSynapseUpdate(*this, os, modelMerged, synSubs);
}
Expand All @@ -364,7 +367,7 @@ void Backend::genSynapseUpdate(CodeStream &os, const ModelSpecMerged &modelMerge
os << "const auto *group = &mergedPresynapticUpdateGroup" << s.getIndex() << "[g]; " << std::endl;

genSynapseIndexCalculation(os, s, 1);

// generate the code for processing spike-like events
if (s.getArchetype().isSpikeEventRequired()) {
genPresynapticUpdate(os, modelMerged, s, funcSubs, false);
Expand Down Expand Up @@ -435,7 +438,10 @@ void Backend::genSynapseUpdate(CodeStream &os, const ModelSpecMerged &modelMerge
synSubs.addVarSubstitution("id_syn", "((group->numTrgNeurons * i) + spike)");
}
synSubs.addVarSubstitution("id_post", "spike");

if (s.getArchetype().isPresynapticOutputRequired()) {
synSubs.addFuncSubstitution("addToPre", 1, "group->revInSyn[" + s.getPreISynIndex(1, synSubs["id_pre"]) + "] += $(0)");
}

s.generateSynapseUpdate(*this, os, modelMerged, synSubs);
}
}
Expand Down Expand Up @@ -1453,6 +1459,10 @@ void Backend::genPresynapticUpdate(CodeStream &os, const ModelSpecMerged &modelM
synSubs.addFuncSubstitution("addToInSyn", 1, "group->inSyn[" + sg.getPostISynIndex(1, "ipost") + "] += $(0)");
}

if (sg.getArchetype().isPresynapticOutputRequired()) {
synSubs.addFuncSubstitution("addToPre", 1, "group->revInSyn[" + sg.getPreISynIndex(1, synSubs["id_pre"]) + "] += $(0)");
}

if (sg.getArchetype().getMatrixType() & SynapseMatrixConnectivity::SPARSE) {
os << "const unsigned int npost = group->rowLength[ipre];" << std::endl;
os << "for (unsigned int j = 0; j < npost; j++)";
Expand Down
14 changes: 12 additions & 2 deletions src/genn/genn/code_generator/backendSIMT.cc
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ void BackendSIMT::genNeuronPrevSpikeTimeUpdateKernel(CodeStream &os, const Subst
genParallelGroup<NeuronPrevSpikeTimeUpdateGroupMerged>(
os, kernelSubs, modelMerged.getMergedNeuronPrevSpikeTimeUpdateGroups(), idStart,
[this](const NeuronGroupInternal &ng) { return padKernelSize(ng.getNumNeurons(), KernelNeuronUpdate); },
[batchSize, this](CodeStream &os, const NeuronPrevSpikeTimeUpdateGroupMerged &ng, Substitutions &popSubs)
[batchSize,this](CodeStream &os, const NeuronPrevSpikeTimeUpdateGroupMerged &ng, Substitutions &popSubs)
{
CodeStream::Scope b(os);

Expand Down Expand Up @@ -737,6 +737,11 @@ void BackendSIMT::genPostsynapticUpdateKernel(CodeStream &os, const Substitution
synSubs.addVarSubstitution("id_post", "shSpk[j]");
synSubs.addVarSubstitution("id_syn", "synAddress");

if(sg.getArchetype().isPresynapticOutputRequired()) {
synSubs.addFuncSubstitution("addToPre", 1,
getAtomic(modelMerged.getModel().getPrecision()) + "(&group->revInSyn[" + sg.getPreISynIndex(batchSize, synSubs["id_pre"]) + "], $(0))");
}

sg.generateSynapseUpdate(*this, os, modelMerged, synSubs);

if (sg.getArchetype().getMatrixType() & SynapseMatrixConnectivity::SPARSE) {
Expand Down Expand Up @@ -797,7 +802,12 @@ void BackendSIMT::genSynapseDynamicsKernel(CodeStream &os, const Substitutions &
else {
synSubs.addFuncSubstitution("addToInSyn", 1, getAtomic(modelMerged.getModel().getPrecision()) + "(&group->inSyn[" + sg.getPostISynIndex(batchSize, synSubs["id_post"]) + "], $(0))");
}


if(sg.getArchetype().isPresynapticOutputRequired()) {
synSubs.addFuncSubstitution("addToPre", 1,
getAtomic(modelMerged.getModel().getPrecision()) + "(&group->revInSyn[" + sg.getPreISynIndex(batchSize, synSubs["id_pre"]) + "], $(0))");
}

sg.generateSynapseUpdate(*this, os, modelMerged, synSubs);
}
});
Expand Down
6 changes: 6 additions & 0 deletions src/genn/genn/code_generator/generateRunner.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1152,6 +1152,12 @@ MemAlloc CodeGenerator::generateRunner(const filesystem::path &outputPath, const
}
}
}
// Loop through fused outgoing synapse populations with weightupdate models that have presynaptic output
for(const auto *sg : n.second.getFusedPreOuptputOutSyn()) {
backend.genArray(definitionsVar, definitionsInternalVar, runnerVarDecl, runnerVarAlloc, runnerVarFree,
model.getPrecision(), "revInSyn" + sg->getFusedPreOutputSuffix(), sg->getInSynLocation(),
sg->getSrcNeuronGroup()->getNumNeurons() * batchSize, mem);
}

// Loop through merged postsynaptic weight updates of incoming synaptic populations
for(const auto *sg: n.second.getFusedWUPreOutSyn()) {
Expand Down
Loading