25template<
typename Scalar_t>
 
   52   for (
size_t i = 0; i < (size_t) df.GetNrows(); i++) {
 
   53      for (
size_t j = 0; j < (size_t) df.GetNcols(); j++) {
 
   54         df(i,j) *= state_gradients_backward(i,j);      
 
   59   if (input_gradient.GetNoElements() > 0) {
 
   60      input_gradient.Mult(df, weights_input);     
 
   63   if (state_gradients_backward.GetNoElements() > 0) {
 
   64      state_gradients_backward.Mult(df, weights_state);  
 
   68   if (input_weight_gradients.GetNoElements() > 0) {
 
   70      input_weight_gradients.TMult(df, input);             
 
   71      input_weight_gradients += tmp;
 
   73   if (state_weight_gradients.GetNoElements() > 0) {
 
   75      state_weight_gradients.TMult(df, state);             
 
   76      state_weight_gradients += tmp;
 
   80   if (bias_gradients.GetNoElements() > 0) {
 
   82      for (
size_t j = 0; j < (size_t) df.GetNcols(); j++) {
 
   85         for (
size_t i = 0; i < (size_t) df.GetNrows(); i++) {
 
   88         bias_gradients(j,0) += 
sum;
 
  104   return input_gradient;
 
static Matrix_t & RecurrentLayerBackward(TMatrixT< Scalar_t > &state_gradients_backward, TMatrixT< Scalar_t > &input_weight_gradients, TMatrixT< Scalar_t > &state_weight_gradients, TMatrixT< Scalar_t > &bias_gradients, TMatrixT< Scalar_t > &df, const TMatrixT< Scalar_t > &state, const TMatrixT< Scalar_t > &weights_input, const TMatrixT< Scalar_t > &weights_state, const TMatrixT< Scalar_t > &input, TMatrixT< Scalar_t > &input_gradient)
Backpropagation step for a Recurrent Neural Network: combines the activation derivative with the incoming output gradient, then computes the input and previous-state gradients and accumulates the weight and bias gradients in place.
Create variable transformations.
static long int sum(long int i)