follow comments
chengduoZH committed May 7, 2018
1 parent aff8a26 commit 676dfd1
Showing 3 changed files with 9 additions and 8 deletions.
9 changes: 3 additions & 6 deletions paddle/fluid/framework/details/fetch_op_handle.cc
@@ -49,7 +49,9 @@ void FetchOpHandle::RunImpl() {
       platform::DeviceContextPool::Instance().Get(platform::CPUPlace());
   for (auto *input : inputs_) {
     auto *var = static_cast<VarHandle *>(input);
-    if (var->generated_op_) var->generated_op_->Wait(cpu_ctx);
+    if (var->generated_op_) {
+      var->generated_op_->Wait(cpu_ctx);
+    }
   }
   tensors_.resize(inputs_.size());
   auto *var_handle = static_cast<VarHandle *>(inputs_[0]);
@@ -61,14 +63,9 @@ void FetchOpHandle::RunImpl() {
     auto &scope = scopes[i];
     auto *var =
         scope->FindVar(kLocalExecScopeName)->Get<Scope *>()->FindVar(var_name);
-    if (var == nullptr) {
-      scope->FindVar(var_name);
-    }
-
     PADDLE_ENFORCE_NOT_NULL(var, "Cannot find variable %s in execution scope",
                             var_name);
     auto &t = var->Get<framework::LoDTensor>();
-
     if (platform::is_gpu_place(t.place())) {
 #ifdef PADDLE_WITH_CUDA
       TensorCopySync(t, cpu, &tensors_[i]);
4 changes: 3 additions & 1 deletion paddle/fluid/framework/details/nccl_all_reduce_op_handle.cc
@@ -36,7 +36,9 @@ void NCCLAllReduceOpHandle::RunImpl() {
   // Wait input done
   for (auto *in : inputs_) {
     auto &p = static_cast<VarHandle *>(in)->place_;
-    if (in->generated_op_) in->generated_op_->Wait(dev_ctxes_[p]);
+    if (in->generated_op_) {
+      in->generated_op_->Wait(dev_ctxes_[p]);
+    }
   }
 
   auto &var_name = static_cast<VarHandle *>(this->inputs_[0])->name_;
4 changes: 3 additions & 1 deletion paddle/fluid/framework/details/send_op_handle.cc
@@ -32,7 +32,9 @@ void SendOpHandle::RunImpl() {
     if (in->DebugString() == "dummy") {  // HACK
       continue;
     }
-    if (in->generated_op_) in->generated_op_->Wait(dev_ctxes_[p]);
+    if (in->generated_op_) {
+      in->generated_op_->Wait(dev_ctxes_[p]);
+    }
   }
   auto &tmp_scope = local_scope_->FindVar(kLocalExecScopeName)->Get<Scope *>();
   // FIXME(wuyi): can not use RunAndRecordEvent here, for it will cause dead
