
Commit

clean code
Superjomn committed Oct 11, 2017
1 parent 3850325 commit cc50ca9
Showing 1 changed file with 0 additions and 16 deletions.
paddle/operators/dynamic_recurrent_op.cc (16 changes: 0 additions & 16 deletions)
@@ -96,7 +96,6 @@ void DynamicRecurrentOp::Run(const Scope& scope,
 
   // call stepnet in all the time steps
   for (size_t step = 0; step < cache_.num_steps; step++) {
-    LOG(INFO) << "run step " << step;
     auto& step_scope = cache_.GetScope(step);
     stepnet_->Run(step_scope, dev_ctx);
   }
@@ -112,23 +111,10 @@ void DynamicRecurrentOp::SplitInputs() const {
     const auto& var = item.second;
     const auto& tensor = var->Get<LoDTensor>();
     TensorArray& ta = step_inputs_[item.first];
-    // NOTE only for debug
-    LOG(INFO) << "unpack lod " << tensor.dims();
-    LOG(INFO) << "lod: ";
-    for (auto& vec : tensor.lod()) {
-      for (auto i : vec) {
-        LOG(INFO) << i;
-      }
-    }
 
     dy_seq_metas_[item.first] =
         ta.Unpack(tensor, level, true /*length_descend*/);
 
-    // NOTE for debug
-    for (size_t i = 0; i < ta.size(); i++) {
-      LOG(INFO) << i << "-th tensor " << ta.Read(i).dims();
-    }
-
     if (cache_.num_steps) {
       PADDLE_ENFORCE_EQ(ta.size(), cache_.num_steps,
                         "inputs should have the same steps");
@@ -261,7 +247,6 @@ void DynamicRecurrentOp::LinkState(const rnn::MemoryAttr& memory,
 
   LoDTensor* pre_state{nullptr};
   if (step == 0) {
-    LOG(INFO) << "init 0-th prestate";
     pre_state = cache_.GetTensor(*cache_.scope, memory.boot_var);
     pre_state->mutable_data<float>(platform::CPUPlace());
     // allocate memory
@@ -270,7 +255,6 @@ void DynamicRecurrentOp::LinkState(const rnn::MemoryAttr& memory,
     detail::ReorderBootState<value_type>(some_meta, *pre_state, &state_pre,
                                          pre_state->place());
   } else {
-    LOG(INFO) << "init " << step << "-th prestate";
    pre_state = cache_.GetTensor(cache_.GetScope(step - 1), memory.var);
   }
 
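A side note, not part of this commit: the deleted lines were unconditional LOG(INFO) traces that fired on every time step, every unpacked input tensor, and every pre-state initialization. When such traces are still occasionally useful, a common alternative is to gate them behind a verbosity level rather than delete them. Below is a minimal sketch of that pattern, assuming glog-style VLOG macros are available (a reasonable assumption in a codebase that already uses LOG(INFO)); the function RunAllSteps and the verbosity level 3 are illustrative, not taken from the file.

// Sketch only: gate a per-step trace behind a glog verbosity level instead of
// an unconditional LOG(INFO). The trace is emitted only when the process runs
// with a high enough verbosity setting.
#include <glog/logging.h>

#include <cstddef>

// Hypothetical stand-in for the real time-step loop; the removed code logged
// "run step <n>" on every iteration via LOG(INFO).
void RunAllSteps(std::size_t num_steps) {
  for (std::size_t step = 0; step < num_steps; ++step) {
    VLOG(3) << "run step " << step;  // silent unless verbosity >= 3
    // ... run the step network for this time step ...
  }
}

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;  // keep demo output on stderr
  RunAllSteps(4);            // prints nothing at the default verbosity
  return 0;
}

Run with, e.g., GLOG_v=3 (or --v=3 when gflags is linked in) to enable the trace; by default it stays silent, which is the same end state this commit reaches by deleting the calls outright.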
