diff options
author | A. Unique TensorFlower <gardener@tensorflow.org> | 2018-07-17 18:08:14 -0700 |
---|---|---|
committer | TensorFlower Gardener <gardener@tensorflow.org> | 2018-07-17 18:11:21 -0700 |
commit | 8238266c4fd433107f38eb126a5c5da05a4d338b (patch) | |
tree | fc4f923e52e8df2aedde5bc180766d501b9b61bf /tensorflow/stream_executor/cuda | |
parent | 07cc6474b219ee3ad9f55860e621f61b34bb6bd1 (diff) |
Support identity activation function in Cudnn implementation of fused conv2d bias activation.
PiperOrigin-RevId: 205008958
Diffstat (limited to 'tensorflow/stream_executor/cuda')
-rw-r--r-- | tensorflow/stream_executor/cuda/cuda_dnn.cc | 10 |
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/tensorflow/stream_executor/cuda/cuda_dnn.cc b/tensorflow/stream_executor/cuda/cuda_dnn.cc
index 08228034f7..e85b6db511 100644
--- a/tensorflow/stream_executor/cuda/cuda_dnn.cc
+++ b/tensorflow/stream_executor/cuda/cuda_dnn.cc
@@ -791,6 +791,11 @@ class CudnnActivationDescriptor {
   double relu_ceiling = 0.0;
   cudnnActivationMode_t mode;
   switch (activation_mode) {
+#if CUDNN_VERSION >= 7100
+    case dnn::ActivationMode::kNone:
+      mode = CUDNN_ACTIVATION_IDENTITY;
+      break;
+#endif
     case dnn::ActivationMode::kRelu6:
       relu_ceiling = 6.0;
       mode = CUDNN_ACTIVATION_CLIPPED_RELU;
@@ -2493,10 +2498,11 @@ port::Status CudnnSupport::DoFusedConvolveImpl(
     DeviceMemory<Type>* output_data, ScratchAllocator* scratch_allocator,
     const dnn::AlgorithmConfig& algorithm_config,
     dnn::ProfileResult* output_profile_result) {
-  if (activation_mode != dnn::ActivationMode::kRelu) {
+  if (activation_mode != dnn::ActivationMode::kRelu &&
+      activation_mode != dnn::ActivationMode::kNone) {
     return port::Status(port::error::INVALID_ARGUMENT,
                        "cudnnConvolutionBiasActivationForward() only supports "
-                       "Relu activation.");
+                       "Relu or None activation.");
   }
 
   CudnnTensorDescriptor conv_input_nd(