// @(#)root/tmva $Id$
// Author: Matt Jachowski

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
 * Package: TMVA                                                                  *
 * Class  : TMVA::TActivationTanh                                                 *
 *                                                                                *
 *                                                                                *
 * Description:                                                                   *
 *      Tanh activation function for TNeuron                                      *
 *                                                                                *
 * Authors (alphabetical):                                                        *
 *      Matt Jachowski  <jachowski@stanford.edu> - Stanford University, USA       *
 *                                                                                *
 * Copyright (c) 2005:                                                            *
 *      CERN, Switzerland                                                         *
 *                                                                                *
 * Redistribution and use in source and binary forms, with or without             *
 * modification, are permitted according to the terms listed in LICENSE           *
 * (see tmva/doc/LICENSE)                                                         *
 **********************************************************************************/

#ifndef ROOT_TMVA_TActivationTanh
#define ROOT_TMVA_TActivationTanh

//////////////////////////////////////////////////////////////////////////
//                                                                        //
// TActivationTanh                                                        //
//                                                                        //
// Tanh activation function for TNeuron                                   //
//                                                                        //
//////////////////////////////////////////////////////////////////////////

#include "TString.h"

#include "TMVA/TActivation.h"

namespace TMVA {

   class TActivationTanh : public TActivation {

   public:

      TActivationTanh() {}
      ~TActivationTanh() {}

      // evaluate the activation function
      Double_t Eval(Double_t arg);

      // evaluate the derivative of the activation function
      Double_t EvalDerivative(Double_t arg);

      // minimum of the range of the activation function
      Double_t GetMin() { return -1; }

      // maximum of the range of the activation function
      Double_t GetMax() { return 1; }

      // expression for the activation function
      TString GetExpression();

      // writer of function code
      virtual void MakeFunction(std::ostream& fout, const TString& fncName);

      void SetSlow() { fFAST = kFALSE; } // to ensure old training files will be processed with the old tanh code
   private:
      // fast tanh approximation
      Double_t fast_tanh(Double_t arg);
      Bool_t fFAST;

      ClassDef(TActivationTanh, 0); // Tanh sigmoid activation function for TNeuron
   };

} // namespace TMVA

#endif
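For reference, the EvalDerivative() declared above corresponds to the identity d/dx tanh(x) = 1 - tanh^2(x). The standalone sketch below illustrates that identity only; it uses std::tanh in place of ROOT's TMath::TanH and plain double in place of Double_t, and it is not the actual TActivationTanh implementation (the corresponding .cxx file, including the fast_tanh() approximation selected by fFAST, is not shown on this page).

#include <cmath>
#include <iostream>

// Tanh activation value, analogous in spirit to TActivationTanh::Eval().
double TanhEval(double arg) { return std::tanh(arg); }

// Derivative, analogous in spirit to TActivationTanh::EvalDerivative():
// d/dx tanh(x) = 1 - tanh^2(x).
double TanhEvalDerivative(double arg)
{
   const double t = std::tanh(arg);
   return 1.0 - t * t;
}

int main()
{
   // Print the activation and its derivative at a few sample points.
   for (double x : {-2.0, -0.5, 0.0, 0.5, 2.0}) {
      std::cout << "x = " << x
                << "  tanh(x) = " << TanhEval(x)
                << "  tanh'(x) = " << TanhEvalDerivative(x) << '\n';
   }
   return 0;
}

Note that the output range of tanh is (-1, 1), which is what GetMin() and GetMax() in the header report as the activation's range.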