Skip to content

Commit

Permalink
Remove GPU stubs for all layers with device-unified Forward() and Backward().
Browse files Browse the repository at this point in the history
  • Loading branch information
Rob Hess committed Jul 25, 2014
1 parent 10b8693 commit e316202
Show file tree
Hide file tree
Showing 14 changed files with 0 additions and 58 deletions.
4 changes: 0 additions & 4 deletions src/caffe/layers/conv_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -188,10 +188,6 @@ void ConvolutionLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(ConvolutionLayer);
#endif

INSTANTIATE_CLASS(ConvolutionLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/eltwise_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -95,10 +95,6 @@ void EltwiseLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(EltwiseLayer);
#endif

INSTANTIATE_CLASS(EltwiseLayer);


Expand Down
4 changes: 0 additions & 4 deletions src/caffe/layers/euclidean_loss_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,6 @@ void EuclideanLossLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(EuclideanLossLayer);
#endif

INSTANTIATE_CLASS(EuclideanLossLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/flatten_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,6 @@ void FlattenLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
(*bottom)[0]->ShareDiff(*top[0]);
}

#ifdef CPU_ONLY
STUB_GPU(FlattenLayer);
#endif

INSTANTIATE_CLASS(FlattenLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/hdf5_data_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -114,10 +114,6 @@ Dtype HDF5DataLayer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
return Dtype(0.);
}

#ifdef CPU_ONLY
STUB_GPU_FORWARD(HDF5DataLayer, Forward);
#endif

INSTANTIATE_CLASS(HDF5DataLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/hdf5_output_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -64,10 +64,6 @@ Dtype HDF5OutputLayer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
return Dtype(0.);
}

#ifdef CPU_ONLY
STUB_GPU(HDF5OutputLayer);
#endif

INSTANTIATE_CLASS(HDF5OutputLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/im2col_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,6 @@ void Im2colLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(Im2colLayer);
#endif

INSTANTIATE_CLASS(Im2colLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/image_data_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -281,10 +281,6 @@ Dtype ImageDataLayer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
return Dtype(0.);
}

#ifdef CPU_ONLY
STUB_GPU_FORWARD(ImageDataLayer, Forward);
#endif

INSTANTIATE_CLASS(ImageDataLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/inner_product_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -96,10 +96,6 @@ void InnerProductLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(InnerProductLayer);
#endif

INSTANTIATE_CLASS(InnerProductLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/power_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -96,10 +96,6 @@ void PowerLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}

#ifdef CPU_ONLY
STUB_GPU(PowerLayer);
#endif

INSTANTIATE_CLASS(PowerLayer);


Expand Down
4 changes: 0 additions & 4 deletions src/caffe/layers/sigmoid_cross_entropy_loss_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,10 +65,6 @@ void SigmoidCrossEntropyLossLayer<Dtype>::Backward(
}
}

#ifdef CPU_ONLY
STUB_GPU(SigmoidCrossEntropyLossLayer);
#endif

INSTANTIATE_CLASS(SigmoidCrossEntropyLossLayer);


Expand Down
5 changes: 0 additions & 5 deletions src/caffe/layers/softmax_loss_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,6 @@ void SoftmaxWithLossLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}


#ifdef CPU_ONLY
STUB_GPU(SoftmaxWithLossLayer);
#endif

INSTANTIATE_CLASS(SoftmaxWithLossLayer);


Expand Down
5 changes: 0 additions & 5 deletions src/caffe/layers/split_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,6 @@ void SplitLayer<Dtype>::Backward(const vector<Blob<Dtype>*>& top,
}
}


#ifdef CPU_ONLY
STUB_GPU(SplitLayer);
#endif

INSTANTIATE_CLASS(SplitLayer);

} // namespace caffe
4 changes: 0 additions & 4 deletions src/caffe/layers/window_data_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -448,10 +448,6 @@ Dtype WindowDataLayer<Dtype>::Forward(const vector<Blob<Dtype>*>& bottom,
return Dtype(0.);
}

#ifdef CPU_ONLY
STUB_GPU_FORWARD(WindowDataLayer, Forward);
#endif

INSTANTIATE_CLASS(WindowDataLayer);

} // namespace caffe

0 comments on commit e316202

Please sign in to comment.