author    Manjunath Kudlur <keveman@gmail.com>    2015-11-06 16:27:58 -0800
committer Manjunath Kudlur <keveman@gmail.com>    2015-11-06 16:27:58 -0800
commit f41959ccb2d9d4c722fe8fc3351401d53bcf4900 (patch)
tree   ef0ca22cb2a5ac4bdec9d080d8e0788a53ed496d /third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h
TensorFlow: Initial commit of TensorFlow library.
TensorFlow is an open source software library for numerical computation using data flow graphs. Base CL: 107276108
Diffstat (limited to 'third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h')
-rw-r--r--  third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h  116
1 file changed, 116 insertions, 0 deletions
diff --git a/third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h b/third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h
new file mode 100644
index 0000000000..94d616f2b5
--- /dev/null
+++ b/third_party/eigen3/unsupported/Eigen/CXX11/src/NeuralNetworks/Activations.h
@@ -0,0 +1,116 @@
+// This file is part of Eigen, a lightweight C++ template library
+// for linear algebra.
+//
+// Copyright (C) 2015 Benoit Steiner <benoit.steiner.goog@gmail.com>
+//
+// This Source Code Form is subject to the terms of the Mozilla
+// Public License v. 2.0. If a copy of the MPL was not distributed
+// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
+#ifndef EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H
+#define EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H
+
+namespace Eigen {
+
+/** scalar_sigmoid_fast_derivative_op
+ * \ingroup CXX11_NeuralNetworks_Module
+ * \brief Template functor to compute the fast derivative of a sigmoid
+ *
+ * Input should be the sigmoid output y = sigmoid(x), not the raw input x;
+ * the derivative is then evaluated as (1 - y) * y.
+ *
+ * \sa class CwiseUnaryOp, Cwise::sigmoid_fast_derivative()
+ */
+template <typename T>
+struct scalar_sigmoid_fast_derivative_op {
+  EIGEN_EMPTY_STRUCT_CTOR(scalar_sigmoid_fast_derivative_op)
+  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T operator()(const T& y) const {
+    const T one = T(1);
+    return (one - y) * y;
+  }
+
+  template <typename Packet>
+  inline Packet packetOp(const Packet& y) const {
+    const Packet one = internal::pset1<Packet>(T(1));
+    return internal::pmul(internal::psub(one, y), y);
+  }
+};
+
+namespace internal {
+template <typename T>
+struct functor_traits<scalar_sigmoid_fast_derivative_op<T> > {
+  enum {
+    Cost = NumTraits<T>::AddCost + NumTraits<T>::MulCost,
+    PacketAccess = packet_traits<T>::HasSub && packet_traits<T>::HasMul
+  };
+};
+} // namespace internal
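+
+// Usage sketch (illustrative, not part of the original header; assumes
+// #include <unsupported/Eigen/CXX11/Tensor>, whose unaryExpr and setValues
+// are standard Eigen::Tensor API): given a tensor of sigmoid outputs y,
+// evaluate the derivative elementwise.
+//
+//   Eigen::Tensor<float, 1> y(4);
+//   y.setValues({0.1f, 0.25f, 0.5f, 0.9f});
+//   Eigen::Tensor<float, 1> dy =
+//       y.unaryExpr(Eigen::scalar_sigmoid_fast_derivative_op<float>());
+//   // dy(i) == (1 - y(i)) * y(i)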
+
+/** scalar_tanh_fast_derivative_op
+ * \ingroup CXX11_NeuralNetworks_Module
+ * \brief Template functor to compute the fast derivative of a tanh
+ *
+ * Input should be the tanh output y = tanh(x), not the raw input x;
+ * the derivative is then evaluated as 1 - y * y.
+ *
+ * \sa class CwiseUnaryOp, Cwise::tanh_fast_derivative()
+ */
+template <typename T>
+struct scalar_tanh_fast_derivative_op {
+  EIGEN_EMPTY_STRUCT_CTOR(scalar_tanh_fast_derivative_op)
+  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE T operator()(const T& y) const {
+    const T one = T(1);
+    return one - (y * y);
+  }
+
+  template <typename Packet>
+  inline Packet packetOp(const Packet& y) const {
+    const Packet one = internal::pset1<Packet>(T(1));
+    return internal::psub(one, internal::pmul(y, y));
+  }
+};
+
+namespace internal {
+template <typename T>
+struct functor_traits<scalar_tanh_fast_derivative_op<T> > {
+  enum {
+    Cost = NumTraits<T>::AddCost + NumTraits<T>::MulCost,
+    PacketAccess = packet_traits<T>::HasSub && packet_traits<T>::HasMul
+  };
+};
+} // namespace internal
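+
+// Usage sketch (illustrative, not part of the original header): given a
+// tensor of tanh outputs y, evaluate the derivative 1 - y^2 elementwise.
+//
+//   Eigen::Tensor<float, 1> y(3);
+//   y.setValues({-0.5f, 0.0f, 0.5f});
+//   Eigen::Tensor<float, 1> dy =
+//       y.unaryExpr(Eigen::scalar_tanh_fast_derivative_op<float>());
+//   // dy(i) == 1 - y(i) * y(i)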
+
+/**
+ * \ingroup CXX11_NeuralNetworks_Module
+ * \brief Template functor to clip the magnitude of the first scalar
+ * to the value of the second.
+ *
+ * \sa class CwiseBinaryOp, MatrixBase::Clip
+ */
+template <typename Scalar>
+struct scalar_clip_op {
+  EIGEN_EMPTY_STRUCT_CTOR(scalar_clip_op)
+  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar
+  operator()(const Scalar& a, const Scalar& b) const {
+    return numext::mini(numext::maxi(a, -b), b);
+  }
+  template <typename Packet>
+  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Packet
+  packetOp(const Packet& a, const Packet& b) const {
+    return internal::pmin(internal::pmax(a, internal::pnegate(b)), b);
+  }
+};
+
+namespace internal {
+template <typename Scalar>
+struct functor_traits<scalar_clip_op<Scalar> > {
+  enum {
+    Cost = NumTraits<Scalar>::AddCost * 3,
+    PacketAccess = packet_traits<Scalar>::HasMax &&
+                   packet_traits<Scalar>::HasMin &&
+                   packet_traits<Scalar>::HasNegate
+  };
+};
+} // namespace internal
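+
+// Usage sketch (illustrative, not part of the original header): clip each
+// a(i) to the range [-b(i), b(i)]; b is assumed elementwise non-negative.
+//
+//   Eigen::Tensor<float, 1> a(3), b(3);
+//   a.setValues({-3.0f, 0.5f, 2.0f});
+//   b.setValues({1.0f, 1.0f, 1.0f});
+//   Eigen::Tensor<float, 1> clipped =
+//       a.binaryExpr(b, Eigen::scalar_clip_op<float>());
+//   // clipped == {-1.0f, 0.5f, 1.0f}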
+
+} // end namespace Eigen
+
+#endif // EIGEN_CXX11_NEURAL_NETWORKS_ACTIVATIONS_H