
Commit

Merge pull request #540 from genn-team/init_rng_tests
Fixed potential bug where RNG isn't instantiated
neworderofjamie authored Oct 24, 2022
2 parents f9e3fe5 + e6d85fb commit 6de3b70
Showing 4 changed files with 178 additions and 6 deletions.
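For context, a minimal sketch of the scenario this commit guards against, assembled from the RandVarSum test added below (the Sum custom update model, its two variable references and the required headers are defined in tests/unit/customUpdate.cc): a model in which the only randomly initialised variable belongs to a custom update. Previously the backend RNG checks did not consult custom updates, so no RNG would be instantiated even though variable initialisation needs one.

// Sketch based on the RandVarSum unit test below; "Sum" is the custom update
// model defined in tests/unit/customUpdate.cc
ModelSpecInternal model;

// Neuron variables use constant initial values, so they need no RNG
NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, 0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

// The custom update's state variable is drawn from a uniform distribution,
// so its initialisation *does* require an RNG
InitVarSnippet::Uniform::ParamValues dist(0.0, 1.0);
Sum::VarValues sumVarValues(initVar<InitVarSnippet::Uniform>(dist));
Sum::VarReferences sumVarReferences(createVarRef(ng, "V"), createVarRef(ng, "U"));
model.addCustomUpdate<Sum>("Sum", "CustomUpdate", {}, sumVarValues, sumVarReferences);
model.finalize();

// With this commit, the backends' checks also scan custom updates and custom
// WU updates, so isGlobalHostRNGRequired()/isGlobalDeviceRNGRequired() now
// correctly return true for a model like this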
20 changes: 20 additions & 0 deletions src/genn/backends/single_threaded_cpu/backend.cc
@@ -1484,6 +1484,26 @@ bool Backend::isGlobalHostRNGRequired(const ModelSpecMerged &modelMerged) const
return true;
}

// If any custom updates require an RNG for initialisation, return true
if(std::any_of(model.getCustomUpdates().cbegin(), model.getCustomUpdates().cend(),
[](const ModelSpec::CustomUpdateValueType &c)
{
return (c.second.isInitRNGRequired());
}))
{
return true;
}

// If any custom WU updates require an RNG for initialisation, return true
if(std::any_of(model.getCustomWUUpdates().cbegin(), model.getCustomWUUpdates().cend(),
[](const ModelSpec::CustomUpdateWUValueType &c)
{
return (c.second.isInitRNGRequired());
}))
{
return true;
}

return false;
}
//--------------------------------------------------------------------------
20 changes: 20 additions & 0 deletions src/genn/genn/code_generator/backendSIMT.cc
@@ -136,6 +136,26 @@ bool BackendSIMT::isGlobalDeviceRNGRequired(const ModelSpecMerged &modelMerged)
return true;
}

// If any custom updates require an RNG for initialisation, return true
if(std::any_of(model.getCustomUpdates().cbegin(), model.getCustomUpdates().cend(),
[](const ModelSpec::CustomUpdateValueType &c)
{
return (c.second.isInitRNGRequired());
}))
{
return true;
}

// If any custom WU updates require an RNG for initialisation, return true
if(std::any_of(model.getCustomWUUpdates().cbegin(), model.getCustomWUUpdates().cend(),
[](const ModelSpec::CustomUpdateWUValueType &c)
{
return (c.second.isInitRNGRequired());
}))
{
return true;
}

return false;
}
//--------------------------------------------------------------------------
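Both backends add the same pair of checks. As a purely illustrative aside (a hypothetical helper, not part of the commit), the repeated std::any_of predicate could be written once and reused for both maps of custom updates:

#include <algorithm>

// Hypothetical helper, not in the commit: true if any custom update in the
// given name -> update map needs an RNG to initialise its variables
template<typename M>
bool anyInitRNGRequired(const M &updates)
{
    return std::any_of(updates.cbegin(), updates.cend(),
                       [](const typename M::value_type &c)
                       {
                           return c.second.isInitRNGRequired();
                       });
}

// Usage inside either backend check:
//   if(anyInitRNGRequired(model.getCustomUpdates()) ||
//      anyInitRNGRequired(model.getCustomWUUpdates()))
//   {
//       return true;
//   }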
88 changes: 88 additions & 0 deletions tests/unit/customUpdate.cc
@@ -141,6 +141,94 @@ IMPLEMENT_MODEL(ReduceNeuronSharedVar);
//--------------------------------------------------------------------------
// Tests
//--------------------------------------------------------------------------

TEST(CustomUpdates, ConstantVarSum)
{
ModelSpecInternal model;

NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, 0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

Sum::VarValues sumVarValues(0.0);
Sum::VarReferences sumVarReferences1(createVarRef(ng, "V"), createVarRef(ng, "U"));

CustomUpdate *cu = model.addCustomUpdate<Sum>("Sum", "CustomUpdate",
{}, sumVarValues, sumVarReferences1);
model.finalize();

CustomUpdateInternal *cuInternal = static_cast<CustomUpdateInternal*>(cu);
ASSERT_FALSE(cuInternal->isZeroCopyEnabled());
ASSERT_FALSE(cuInternal->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_FALSE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(CustomUpdates, UninitialisedVarSum)
{
ModelSpecInternal model;

NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, 0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

Sum::VarValues sumVarValues(uninitialisedVar());
Sum::VarReferences sumVarReferences1(createVarRef(ng, "V"), createVarRef(ng, "U"));

CustomUpdate *cu = model.addCustomUpdate<Sum>("Sum", "CustomUpdate",
{}, sumVarValues, sumVarReferences1);
model.finalize();

CustomUpdateInternal *cuInternal = static_cast<CustomUpdateInternal*>(cu);
ASSERT_FALSE(cuInternal->isZeroCopyEnabled());
ASSERT_FALSE(cuInternal->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_FALSE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(CustomUpdates, RandVarSum)
{
ModelSpecInternal model;

NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, 0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

InitVarSnippet::Uniform::ParamValues dist(0.0, 1.0);
Sum::VarValues sumVarValues(initVar<InitVarSnippet::Uniform>(dist));
Sum::VarReferences sumVarReferences1(createVarRef(ng, "V"), createVarRef(ng, "U"));

CustomUpdate *cu = model.addCustomUpdate<Sum>("Sum", "CustomUpdate",
{}, sumVarValues, sumVarReferences1);
model.finalize();

CustomUpdateInternal *cuInternal = static_cast<CustomUpdateInternal*>(cu);
ASSERT_FALSE(cuInternal->isZeroCopyEnabled());
ASSERT_TRUE(cuInternal->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_TRUE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}
TEST(CustomUpdates, VarReferenceTypeChecks)
{
ModelSpecInternal model;
56 changes: 50 additions & 6 deletions tests/unit/neuronGroup.cc
@@ -211,55 +211,99 @@ TEST(NeuronGroup, InvalidName)

TEST(NeuronGroup, ConstantVarIzhikevich)
{
ModelSpec model;
ModelSpecInternal model;

NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, 0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

model.finalize();

ASSERT_FALSE(ng->isZeroCopyEnabled());
ASSERT_FALSE(ng->isSimRNGRequired());
ASSERT_FALSE(ng->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_FALSE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(NeuronGroup, UnitialisedVarIzhikevich)
TEST(NeuronGroup, UninitialisedVarIzhikevich)
{
ModelSpec model;
ModelSpecInternal model;

NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(uninitialisedVar(), uninitialisedVar());
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

model.finalize();

ASSERT_FALSE(ng->isZeroCopyEnabled());
ASSERT_FALSE(ng->isSimRNGRequired());
ASSERT_FALSE(ng->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_FALSE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(NeuronGroup, UnitialisedVarRand)
TEST(NeuronGroup, RandVarIzhikevich)
{
ModelSpec model;
ModelSpecInternal model;

InitVarSnippet::Uniform::ParamValues dist(0.0, 1.0);
NeuronModels::Izhikevich::ParamValues paramVals(0.02, 0.2, -65.0, 8.0);
NeuronModels::Izhikevich::VarValues varVals(0.0, initVar<InitVarSnippet::Uniform>(dist));
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::Izhikevich>("Neurons0", 10, paramVals, varVals);

model.finalize();

ASSERT_FALSE(ng->isZeroCopyEnabled());
ASSERT_FALSE(ng->isSimRNGRequired());
ASSERT_TRUE(ng->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_TRUE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(NeuronGroup, Poisson)
{
ModelSpec model;
ModelSpecInternal model;

NeuronModels::PoissonNew::ParamValues paramVals(20.0);
NeuronModels::PoissonNew::VarValues varVals(0.0);
NeuronGroup *ng = model.addNeuronPopulation<NeuronModels::PoissonNew>("Neurons0", 10, paramVals, varVals);

model.finalize();

ASSERT_FALSE(ng->isZeroCopyEnabled());
ASSERT_TRUE(ng->isSimRNGRequired());
ASSERT_FALSE(ng->isInitRNGRequired());

// Create a backend
CodeGenerator::SingleThreadedCPU::Preferences preferences;
CodeGenerator::SingleThreadedCPU::Backend backend(model.getPrecision(), preferences);

// Merge model
CodeGenerator::ModelSpecMerged modelSpecMerged(model, backend);

ASSERT_TRUE(backend.isGlobalHostRNGRequired(modelSpecMerged));
}

TEST(NeuronGroup, FuseWUMPrePost)
