TMVA::DNN::TSharedLayer< Architecture_t > Class Template Reference

template<typename Architecture_t>
class TMVA::DNN::TSharedLayer< Architecture_t >

Layer class with shared weight and bias matrices.

Like the TLayer class, except that the weight and bias matrices are shared between different instances of the net, which can be used to implement multithreaded, 'Hogwild'-style training.

Definition at line 147 of file Layer.h.
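
A hedged sketch of the sharing mechanism (not part of the generated reference): it assumes the reference backend TMVA::DNN::TReference<double> from TMVA/DNN/Architectures/Reference.h, the enum value EActivationFunction::kTanh, and a TLayer constructor of the form TLayer(batchSize, inputWidth, width, f, dropoutProbability); these may differ between ROOT versions.

   #include "TMVA/DNN/Layer.h"
   #include "TMVA/DNN/Architectures/Reference.h" // assumed header of the reference backend

   int main()
   {
      using Architecture_t = TMVA::DNN::TReference<double>;
      using Layer_t        = TMVA::DNN::TLayer<Architecture_t>;
      using SharedLayer_t  = TMVA::DNN::TSharedLayer<Architecture_t>;

      // One "master" layer owns its weight and bias matrices.
      Layer_t master(/*batchSize=*/32, /*inputWidth=*/10, /*width=*/5,
                     TMVA::DNN::EActivationFunction::kTanh,
                     /*dropoutProbability=*/1.0);

      // Each worker gets a TSharedLayer that only references the master's
      // weights and biases but keeps private activations and gradients, as
      // required for 'Hogwild'-style multithreaded updates.
      SharedLayer_t worker0(/*fBatchSize=*/32, master);
      SharedLayer_t worker1(/*fBatchSize=*/32, master);

      worker0.Print(); // print the layer (see Print() below)
      return 0;
   }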

Public Types

using Matrix_t = typename Architecture_t::Matrix_t
 
using Scalar_t = typename Architecture_t::Scalar_t
 
using Tensor_t = typename Architecture_t::Tensor_t
 

Public Member Functions

 TSharedLayer (const TSharedLayer &layer)
 
 TSharedLayer (size_t fBatchSize, TLayer< Architecture_t > &layer)
 
void Backward (Matrix_t &gradients_backward, const Matrix_t &activations_backward, ERegularization r, Scalar_t weightDecay)
 Compute weight, bias and activation gradients.
 
void Forward (Matrix_t &input, bool applyDropout=false)
 Compute activation of the layer for the given input.
 
EActivationFunction GetActivationFunction () const
 
Matrix_t & GetActivationGradients ()
 
const Matrix_t & GetActivationGradients () const
 
size_t GetBatchSize () const
 
Matrix_t & GetBiases ()
 
const Matrix_t & GetBiases () const
 
Matrix_t & GetBiasGradients ()
 
const Matrix_t & GetBiasGradients () const
 
size_t GetDropoutProbability () const
 
size_t GetInputWidth () const
 
Matrix_t & GetOutput ()
 
const Matrix_t & GetOutput () const
 
Matrix_t & GetWeightGradients ()
 
const Matrix_t & GetWeightGradients () const
 
Matrix_t & GetWeights () const
 
size_t GetWidth () const
 
void Print () const
 
void SetDropoutProbability (Scalar_t p)
 

Private Attributes

Matrix_t fActivationGradients
 Gradients w.r.t. the activations of this layer.
 
size_t fBatchSize
 Batch size used for training and evaluation.
 
Matrix_t & fBiases
 Reference to the bias vectors of this layer.
 
Matrix_t fBiasGradients
 Gradients w.r.t. the bias values of this layer.
 
Matrix_t fDerivatives
 First derivatives of the activations of this layer.
 
Scalar_t fDropoutProbability
 Probability that an input is active.
 
EActivationFunction fF
 Activation function of the layer.
 
size_t fInputWidth
 Number of neurons of the previous layer.
 
Matrix_t fOutput
 Activations of this layer.
 
Matrix_t fWeightGradients
 Gradients w.r.t. the weights of this layer.
 
Matrix_t & fWeights
 Reference to the weight matrix of this layer.
 
size_t fWidth
 Number of neurons of this layer.
 

#include <TMVA/DNN/Layer.h>

Member Typedef Documentation

◆ Matrix_t

template<typename Architecture_t >
using TMVA::DNN::TSharedLayer< Architecture_t >::Matrix_t = typename Architecture_t::Matrix_t

Definition at line 153 of file Layer.h.

◆ Scalar_t

template<typename Architecture_t >
using TMVA::DNN::TSharedLayer< Architecture_t >::Scalar_t = typename Architecture_t::Scalar_t

Definition at line 152 of file Layer.h.

◆ Tensor_t

template<typename Architecture_t >
using TMVA::DNN::TSharedLayer< Architecture_t >::Tensor_t = typename Architecture_t::Tensor_t

Definition at line 154 of file Layer.h.

Constructor & Destructor Documentation

◆ TSharedLayer() [1/2]

template<typename Architecture_t >
TMVA::DNN::TSharedLayer< Architecture_t >::TSharedLayer ( size_t  fBatchSize,
TLayer< Architecture_t > &  layer 
)

Definition at line 328 of file Layer.h.
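
As a hedged follow-up to the sketch in the class description, one shared copy per worker thread might be created as follows (SharedLayer_t and master come from that sketch, and nThreads is a placeholder; none of this is part of the reference):

   #include <vector>

   std::vector<SharedLayer_t> workers;
   workers.reserve(nThreads); // avoid reallocation copies
   for (size_t i = 0; i < nThreads; ++i) {
      // Every element references the master's weight and bias matrices.
      workers.emplace_back(/*fBatchSize=*/32, master);
   }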

◆ TSharedLayer() [2/2]

template<typename Architecture_t >
TMVA::DNN::TSharedLayer< Architecture_t >::TSharedLayer ( const TSharedLayer< Architecture_t > &  layer)

Definition at line 343 of file Layer.h.

Member Function Documentation

◆ Backward()

template<typename Architecture_t >
auto TMVA::DNN::TSharedLayer< Architecture_t >::Backward ( Matrix_t &  gradients_backward,
const Matrix_t &  activations_backward,
ERegularization  r,
Scalar_t  weightDecay 
)
inline

Compute weight, bias and activation gradients.

Uses the precomputed first partial derivatives of the activation function computed during forward propagation and modifies them. Must only be called directly after the corresponding call to Forward(...).

Definition at line 373 of file Layer.h.
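
A hedged sketch of the required call order and of the parameter roles implied by the names (the layer and the matrices are placeholders declared elsewhere; ERegularization::kNone is assumed to be one of the regularization settings available in TMVA::DNN):

   layer.Forward(input, /*applyDropout=*/false);      // must directly precede Backward()
   layer.Backward(gradients_backward,                 // receives the gradients passed on to the previous layer
                  activations_backward,               // activations produced by the previous layer
                  TMVA::DNN::ERegularization::kNone,  // no regularization term added to the weight gradients
                  /*weightDecay=*/0.0);
   const auto &dW = layer.GetWeightGradients();       // gradients w.r.t. the (shared) weights
   const auto &dB = layer.GetBiasGradients();         // gradients w.r.t. the (shared) biases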

◆ Forward()

template<typename Architecture_t >
auto TMVA::DNN::TSharedLayer< Architecture_t >::Forward ( Matrix_t &  input,
bool  applyDropout = false 
)
inline

Compute activation of the layer for the given input.

The input must be in matrix form with the different rows corresponding to different events in the batch. Computes activations as well as the first partial derivative of the activation function at those activations.

Definition at line 356 of file Layer.h.
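
A hedged usage sketch, assuming Matrix_t can be constructed as Matrix_t(nRows, nCols) (as for TMatrixT in the reference backend) and reusing the worker0 layer from the sketch in the class description:

   using Matrix_t = Architecture_t::Matrix_t;

   // One row per event in the batch, one column per input neuron.
   Matrix_t input(worker0.GetBatchSize(), worker0.GetInputWidth());
   // ... fill input from the training data ...
   worker0.Forward(input, /*applyDropout=*/true);     // evaluate the layer, applying dropout to the input
   const Matrix_t &output = worker0.GetOutput();      // activations: GetBatchSize() x GetWidth()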

◆ GetActivationFunction()

template<typename Architecture_t >
EActivationFunction TMVA::DNN::TSharedLayer< Architecture_t >::GetActivationFunction ( ) const
inline

Definition at line 205 of file Layer.h.

◆ GetActivationGradients() [1/2]

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetActivationGradients ( )
inline

Definition at line 212 of file Layer.h.

◆ GetActivationGradients() [2/2]

template<typename Architecture_t >
const Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetActivationGradients ( ) const
inline

Definition at line 213 of file Layer.h.

◆ GetBatchSize()

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::GetBatchSize ( ) const
inline

Definition at line 198 of file Layer.h.

◆ GetBiases() [1/2]

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetBiases ( )
inline

Definition at line 210 of file Layer.h.

◆ GetBiases() [2/2]

template<typename Architecture_t >
const Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetBiases ( ) const
inline

Definition at line 211 of file Layer.h.

◆ GetBiasGradients() [1/2]

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetBiasGradients ( )
inline

Definition at line 214 of file Layer.h.

◆ GetBiasGradients() [2/2]

template<typename Architecture_t >
const Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetBiasGradients ( ) const
inline

Definition at line 215 of file Layer.h.

◆ GetDropoutProbability()

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::GetDropoutProbability ( ) const
inline

Definition at line 201 of file Layer.h.

◆ GetInputWidth()

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::GetInputWidth ( ) const
inline

Definition at line 199 of file Layer.h.

◆ GetOutput() [1/2]

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetOutput ( )
inline

Definition at line 207 of file Layer.h.

◆ GetOutput() [2/2]

template<typename Architecture_t >
const Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetOutput ( ) const
inline

Definition at line 208 of file Layer.h.

◆ GetWeightGradients() [1/2]

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetWeightGradients ( )
inline

Definition at line 216 of file Layer.h.

◆ GetWeightGradients() [2/2]

template<typename Architecture_t >
const Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetWeightGradients ( ) const
inline

Definition at line 217 of file Layer.h.

◆ GetWeights()

template<typename Architecture_t >
Matrix_t & TMVA::DNN::TSharedLayer< Architecture_t >::GetWeights ( ) const
inline

Definition at line 209 of file Layer.h.

◆ GetWidth()

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::GetWidth ( ) const
inline

Definition at line 200 of file Layer.h.

◆ Print()

template<typename Architecture_t >
void TMVA::DNN::TSharedLayer< Architecture_t >::Print ( ) const

Definition at line 393 of file Layer.h.

◆ SetDropoutProbability()

template<typename Architecture_t >
void TMVA::DNN::TSharedLayer< Architecture_t >::SetDropoutProbability ( Scalar_t  p)
inline

Definition at line 203 of file Layer.h.

Member Data Documentation

◆ fActivationGradients

template<typename Architecture_t >
Matrix_t TMVA::DNN::TSharedLayer< Architecture_t >::fActivationGradients
private

Gradients w.r.t. the activations of this layer.

Definition at line 171 of file Layer.h.

◆ fBatchSize

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::fBatchSize
private

Batch size used for training and evaluation.

Definition at line 159 of file Layer.h.

◆ fBiases

template<typename Architecture_t >
Matrix_t& TMVA::DNN::TSharedLayer< Architecture_t >::fBiases
private

Reference to the bias vectors of this layer.

Definition at line 166 of file Layer.h.

◆ fBiasGradients

template<typename Architecture_t >
Matrix_t TMVA::DNN::TSharedLayer< Architecture_t >::fBiasGradients
private

Gradients w.r.t. the bias values of this layer.

Definition at line 170 of file Layer.h.

◆ fDerivatives

template<typename Architecture_t >
Matrix_t TMVA::DNN::TSharedLayer< Architecture_t >::fDerivatives
private

 First derivatives of the activations of this layer.

Definition at line 168 of file Layer.h.

◆ fDropoutProbability

template<typename Architecture_t >
Scalar_t TMVA::DNN::TSharedLayer< Architecture_t >::fDropoutProbability
private

Probability that an input is active.

Definition at line 163 of file Layer.h.

◆ fF

template<typename Architecture_t >
EActivationFunction TMVA::DNN::TSharedLayer< Architecture_t >::fF
private

Activation function of the layer.

Definition at line 173 of file Layer.h.

◆ fInputWidth

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::fInputWidth
private

Number of neurons of the previous layer.

Definition at line 160 of file Layer.h.

◆ fOutput

template<typename Architecture_t >
Matrix_t TMVA::DNN::TSharedLayer< Architecture_t >::fOutput
private

Activations of this layer.

Definition at line 167 of file Layer.h.

◆ fWeightGradients

template<typename Architecture_t >
Matrix_t TMVA::DNN::TSharedLayer< Architecture_t >::fWeightGradients
private

Gradients w.r.t. the weights of this layer.

Definition at line 169 of file Layer.h.

◆ fWeights

template<typename Architecture_t >
Matrix_t& TMVA::DNN::TSharedLayer< Architecture_t >::fWeights
private

Reference to the weight matrix of this layer.

Definition at line 165 of file Layer.h.

◆ fWidth

template<typename Architecture_t >
size_t TMVA::DNN::TSharedLayer< Architecture_t >::fWidth
private

Number of neurons of this layer.

Definition at line 161 of file Layer.h.

The documentation for this class was generated from the following file:
  • tmva/tmva/inc/TMVA/DNN/Layer.h