From 88a0f66e652fe52322776e0198080001828cc8da Mon Sep 17 00:00:00 2001 From: wangyanfei01 Date: Thu, 13 Oct 2016 20:58:31 +0800 Subject: [PATCH 1/3] remove unused init function --- paddle/trainer/Trainer.cpp | 9 --------- paddle/trainer/Trainer.h | 6 ------ 2 files changed, 15 deletions(-) diff --git a/paddle/trainer/Trainer.cpp b/paddle/trainer/Trainer.cpp index 275150e12d12b5..8e9fe0c3e70fbc 100644 --- a/paddle/trainer/Trainer.cpp +++ b/paddle/trainer/Trainer.cpp @@ -88,15 +88,6 @@ P_DEFINE_string(model_list, "", namespace paddle { -void Trainer::init(int argc, char** argv) { - initMain(argc, argv); - initPython(argc, argv); - - auto config = TrainerConfigHelper::createFromFlagConfig(); - feenableexcept(FE_INVALID | FE_DIVBYZERO | FE_OVERFLOW); - - init(config); -} void Trainer::init(const std::shared_ptr &config, bool testing, diff --git a/paddle/trainer/Trainer.h b/paddle/trainer/Trainer.h index 9bfd6d107a2043..3edef9a327cae8 100644 --- a/paddle/trainer/Trainer.h +++ b/paddle/trainer/Trainer.h @@ -72,12 +72,6 @@ class Trainer { const std::shared_ptr &dataProvider = nullptr, const std::shared_ptr &testDataProvider = nullptr); - /** - * Initialize Trainer from command line flags. - */ - void init(int argc, char** argv); - - /** * Train until num_passes reached. * One pass means neural network train through all training data. 
From a150b50cce4f4f78eed8137c1b131e3d9020ce6c Mon Sep 17 00:00:00 2001 From: wangyanfei01 Date: Thu, 13 Oct 2016 21:25:17 +0800 Subject: [PATCH 2/3] tag 3 useless variables to clean Trainer class --- paddle/trainer/Trainer.cpp | 14 +++++++++----- paddle/trainer/Trainer.h | 9 +-------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/paddle/trainer/Trainer.cpp b/paddle/trainer/Trainer.cpp index 8e9fe0c3e70fbc..f1c88af0e5b615 100644 --- a/paddle/trainer/Trainer.cpp +++ b/paddle/trainer/Trainer.cpp @@ -88,12 +88,16 @@ P_DEFINE_string(model_list, "", namespace paddle { - void Trainer::init(const std::shared_ptr &config, - bool testing, - const std::shared_ptr &gradientMachine, - const std::shared_ptr &dataProvider, - const std::shared_ptr &testDataProvider) { + bool testing) { + /// @TODO(yanfei): Clean this useless variable in next commit. + /// redundant parameters are removed, set nullptr with local variable + /// to verify that these three variables are useless. + /// Compiling Internal opensource/external opensource/metric learning passed + const std::shared_ptr &gradientMachine = nullptr; + const std::shared_ptr &dataProvider = nullptr; + const std::shared_ptr &testDataProvider = nullptr; + this->stats_ = std::make_shared(); config_ = config; diff --git a/paddle/trainer/Trainer.h b/paddle/trainer/Trainer.h index 3edef9a327cae8..c0d730b1ab7751 100644 --- a/paddle/trainer/Trainer.h +++ b/paddle/trainer/Trainer.h @@ -60,17 +60,10 @@ class Trainer { * * @param config TrainerConfig. * @param testing true if only for testing - * @param gradientMachine GradientMachine that will be trained. - * nullptr if create from config. - * @param dataProvider Train Data Provider. null if create from config. - * @param testDataProvider Test Data Provider. null if create from config. 
*/ virtual void init( const std::shared_ptr &config, - bool testing = false, - const std::shared_ptr &gradientMachine = nullptr, - const std::shared_ptr &dataProvider = nullptr, - const std::shared_ptr &testDataProvider = nullptr); + bool testing = false); /** * Train until num_passes reached. From 688131cd3e72b3676f51365c7762950c9c3bd2ea Mon Sep 17 00:00:00 2001 From: wangyanfei01 Date: Thu, 13 Oct 2016 21:49:43 +0800 Subject: [PATCH 3/3] remove 3 nullptr const variables --- paddle/trainer/Trainer.cpp | 16 +++------------- paddle/trainer/TrainerInternal.cpp | 10 +++------- paddle/trainer/TrainerInternal.h | 2 -- 3 files changed, 6 insertions(+), 22 deletions(-) diff --git a/paddle/trainer/Trainer.cpp b/paddle/trainer/Trainer.cpp index f1c88af0e5b615..976784fbc56088 100644 --- a/paddle/trainer/Trainer.cpp +++ b/paddle/trainer/Trainer.cpp @@ -90,14 +90,6 @@ namespace paddle { void Trainer::init(const std::shared_ptr &config, bool testing) { - /// @TODO(yanfei): Clean this useless variable in next commit. - /// redundant parameters are removed, set nullptr with local variable - /// to verify that these three variables are useless. 
- /// Compiling Internal opensource/external opensource/metric learning passed - const std::shared_ptr &gradientMachine = nullptr; - const std::shared_ptr &dataProvider = nullptr; - const std::shared_ptr &testDataProvider = nullptr; - this->stats_ = std::make_shared(); config_ = config; @@ -166,7 +158,7 @@ void Trainer::init(const std::shared_ptr &config, } // initialize trainer internal - trainerInternal_.init(config_, gradientMachine, + trainerInternal_.init(config_, TrainerInternalConfig::createFromMode(mode_), stats_, testing); std::unique_ptr paramConfig( @@ -187,8 +179,7 @@ void Trainer::init(const std::shared_ptr &config, (!IGradientMachineMode::dataMustInCpu(mode_, FLAGS_trainer_count)); - dataProvider_ = dataProvider; - if (!dataProvider_ && config_->hasDataConfig()) { + if (config_->hasDataConfig()) { dataProvider_.reset(DataProvider::create(*config_, *config_, gpuData)); } if (dataProvider_) { @@ -204,8 +195,7 @@ void Trainer::init(const std::shared_ptr &config, } } - testDataProvider_ = testDataProvider; - if (!testDataProvider_ && config_->hasTestDataConfig()) { + if (config_->hasTestDataConfig()) { testDataProvider_.reset( DataProvider::create(config_->getTestDataConfig(), *config_, gpuData)); } diff --git a/paddle/trainer/TrainerInternal.cpp b/paddle/trainer/TrainerInternal.cpp index 6029a4b2c1d0a0..d5b614a6a16bdf 100644 --- a/paddle/trainer/TrainerInternal.cpp +++ b/paddle/trainer/TrainerInternal.cpp @@ -38,7 +38,6 @@ limitations under the License. 
*/ namespace paddle { void TrainerInternal::init(const std::shared_ptr &config, - const GradientMachinePtr &gradientMachine, std::unique_ptr &&intconfig, const std::shared_ptr &stats, bool testing) { @@ -53,12 +52,9 @@ void TrainerInternal::init(const std::shared_ptr &config, createParameterUpdater(testing); } - gradientMachine_ = gradientMachine; - if (!gradientMachine) { - gradientMachine_.reset(GradientMachine::create( - config_->getConfig().model_config(), intconfig_->mode, - parameterUpdater_->getParameterTypes())); - } + gradientMachine_.reset(GradientMachine::create( + config_->getConfig().model_config(), intconfig_->mode, + parameterUpdater_->getParameterTypes())); } void TrainerInternal::trainOneBatch(int64_t batchId, diff --git a/paddle/trainer/TrainerInternal.h b/paddle/trainer/TrainerInternal.h index 17011c4d2e46fe..ed4a9650096ac0 100644 --- a/paddle/trainer/TrainerInternal.h +++ b/paddle/trainer/TrainerInternal.h @@ -50,13 +50,11 @@ class TrainerInternal { /** * Intializes trainer internal class * @param config network config - * @param machine gradient machine * @param intconfig training config * @param stats training stats * @param testing if it is in testing phase */ void init(const std::shared_ptr &config, - const GradientMachinePtr &machine, std::unique_ptr &&intconfig, const std::shared_ptr &stats, bool testing);