// @(#)root/tmva $Id$
// Author: Matt Jachowski 

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis       *
 * Package: TMVA                                                                  *
 * Class  : TMVA::TActivationReLU                                                 *
 * Web    : http://tmva.sourceforge.net                                           *
 *                                                                                *
 * Description:                                                                   *
 *      Rectified linear unit (ReLU) activation function for TNeuron              *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Matt Jachowski  <jachowski@stanford.edu> - Stanford University, USA       *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (http://tmva.sourceforge.net/LICENSE)                                          *
 **********************************************************************************/ 

#ifndef ROOT_TMVA_TActivationReLU
#define ROOT_TMVA_TActivationReLU

//////////////////////////////////////////////////////////////////////////
//                                                                      //
// TActivationReLU                                                      //
//                                                                      //
// Rectified Linear Unit activation function for TNeuron                //
//                                                                      //
//////////////////////////////////////////////////////////////////////////
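// A minimal usage sketch (illustrative only, assuming the interface
// declared below):
//
//    TMVA::TActivationReLU relu;
//    Double_t y  = relu.Eval(-0.5);           // returns 0 (negative inputs are clipped)
//    Double_t dy = relu.EvalDerivative(2.0);  // returns 1 (unit slope for positive inputs)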

#ifndef ROOT_TFormula
#include "TFormula.h"
#endif
#ifndef ROOT_TString
#include "TString.h"
#endif

#ifndef ROOT_TMVA_TActivation
#include "TMVA/TActivation.h"
#endif

namespace TMVA {
  
   class TActivationReLU : public TActivation {
    
   public:

      TActivationReLU();
      ~TActivationReLU();

      // evaluate the activation function: f(x) = max(0, x)
      Double_t Eval(Double_t arg) { return arg > 0 ? arg : 0; }

      // evaluate the derivative of the activation function
      // (the derivative at arg == 0 is taken to be 0 by convention)
      Double_t EvalDerivative(Double_t arg) { return arg > 0 ? 1 : 0; }

      // minimum of the range of the activation function
      // (the ReLU output is never negative)
      Double_t GetMin() { return 0; }

      // maximum of the range of the activation function
      // (the ReLU is unbounded above; a nominal value of 1 is returned)
      Double_t GetMax() { return 1; }

      // expression for the activation function
      TString GetExpression();
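      // (expected to return a formula string describing max(0, x); the exact
      //  string is defined in the implementation file)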

      // writer of function code
      virtual void MakeFunction(std::ostream& fout, const TString& fncName);
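      // (illustrative: the emitted standalone function is expected to mirror
      //  Eval(), e.g. "double <fncName>(double x) { return x > 0 ? x : 0; }";
      //  the exact output is defined in the implementation file)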

   private:

      ClassDef(TActivationReLU,0) // Rectified linear unit activation function for TNeuron
   };

} // namespace TMVA

#endif