#ifndef ROOT_TMVA_MethodCFMlpANN_Utils
#define ROOT_TMVA_MethodCFMlpANN_Utils

#include "TMVA/MethodCFMlpANN_def.h"
#include "TMVA/MsgLogger.h"

#include "Rtypes.h"

#include <cstdio>   // printf used in VARn2::operator()
#include <cstdlib>  // std::exit used in VARn2::operator()

namespace TMVA {
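
   /////////////////////////////////////////////////////////////////////////
   /// Helper class for the Clermont-Ferrand MLP (CFMlpANN) method: a C++
   /// translation of the original Fortran implementation of the network
   /// (training, forward/backward propagation and bookkeeping routines).
   /////////////////////////////////////////////////////////////////////////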
   class MethodCFMlpANN_Utils {

   public:

      MethodCFMlpANN_Utils();
      virtual ~MethodCFMlpANN_Utils();

   protected:

      // main training routine of the translated Fortran network code
      void Train_nn( Double_t *tin2, Double_t *tout2, Int_t *ntrain,
                     Int_t *ntest, Int_t *nvar2, Int_t *nlayer,
                     Int_t *nodes, Int_t *ncycle );
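      // Hypothetical call from a derived class (variable names here are
      // illustrative only, not part of this interface); all counts are passed
      // by pointer because the routine is a Fortran translation:
      //
      //    Train_nn( tin, tout, &nTrain, &nTest, &nVar, &nLayers, nodes, &nCycles );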

      // network set-up: number of training/test events, layers,
      // nodes per layer and training cycles
      void Entree_new( Int_t *, char *, Int_t *ntrain, Int_t *ntest,
                       Int_t *numlayer, Int_t *nodes, Int_t *numcycle,
                       Int_t );
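      // Pure virtual data-access hook: the derived MethodCFMlpANN class
      // implements it to hand event data (input variables, class/target
      // information) to the translated routines above.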
      virtual Int_t DataInterface( Double_t*, Double_t*, Int_t*, Int_t*, Int_t*, Int_t*,
                                   Double_t*, Int_t*, Int_t* ) = 0;
      Double_t Fdecroi(Int_t *i__);   // decreasing function (used for the learning rate)
      Double_t Sen3a(void);           // pseudo-random number generator

      void Wini      ();                                               // weight initialisation
      void En_avant  (Int_t *ievent);                                  // forward propagation (training sample)
      void En_avant2 (Int_t *ievent);                                  // forward propagation (second/test sample)
      void En_arriere(Int_t *ievent);                                  // back propagation
      void Leclearn  (Int_t *ktest, Double_t *tout2, Double_t *tin2);  // read the training sample
      void Out       (Int_t *iii, Int_t *maxcycle);
      void Cout      (Int_t *, Double_t *xxx);                         // cost function on the training sample
      void Innit     (char *det, Double_t *tout2, Double_t *tin2, Int_t );
      void TestNN    ();
      void Inl       ();
      void GraphNN   (Int_t *ilearn, Double_t *, Double_t *, char *, Int_t);
      void Foncf     (Int_t *i__, Double_t *u, Double_t *f);           // neuron activation (transfer) function
      void Cout2     (Int_t * , Double_t *yyy);                        // cost function on the test sample
      void Lecev2    (Int_t *ktest, Double_t *tout2, Double_t *tin2);  // read the test sample
      void Arret     (const char* mot );                               // print message and abort
      void CollectVar(Int_t *nvar, Int_t *class__, Double_t *xpg);     // fetch the input variables of one event
   protected:

      Int_t fg_100;                            // constant used by the translated Fortran code
      Int_t fg_0;                              // constant used by the translated Fortran code
      static const Int_t fg_max_nVar_;         // maximum number of input variables
      static const Int_t fg_max_nNodes_;       // maximum number of nodes per layer
      Int_t fg_999;                            // constant used by the translated Fortran code
      static const char* const fg_MethodName;  // method name used in printouts
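      // The accessors below reproduce the 1-based, Fortran-style indexing of
      // the original code on flat C arrays. For example, assuming the limits
      // max_nLayers_ = 6 and max_nNodes_ = 30 from MethodCFMlpANN_def.h, the
      // first weight W_ref(w, 1, 1, 1) maps to w[(1*30 + 1)*6 + 1 - 187] = w[0],
      // and Ww_ref(ww, 1, 1) maps to ww[1*6 + 1 - 7] = ww[0].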
      Double_t W_ref(const Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) const {
         return wNN[(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
      }
      Double_t& W_ref(Double_t wNN[], Int_t a_1, Int_t a_2, Int_t a_3) {
         return wNN[(a_3*max_nNodes_ + a_2)*max_nLayers_ + a_1 - 187];
      }

      Double_t Ww_ref(const Double_t wwNN[], Int_t a_1, Int_t a_2) const {
         return wwNN[a_2*max_nLayers_ + a_1 - 7];
      }
      Double_t& Ww_ref(Double_t wwNN[], Int_t a_1, Int_t a_2) {
         return wwNN[a_2*max_nLayers_ + a_1 - 7];
      }

      // training steering parameters (learning rate eta, epsilon range,
      // layer/event/cycle counts, ...)
      struct {
         Double_t epsmin, epsmax, eeps, eta;
         Int_t layerm, lclass, nevl, nblearn, nunilec, nunisor, nunishort, nunap;
         Int_t nvar, itest, ndiv, ichoi, ndivis, nevt;
      } fParam_1;

      // per-variable normalisation limits and per-event class labels
      struct {
         Double_t xmax[max_nVar_], xmin[max_nVar_];
         Int_t nclass[max_Events_], mclass[max_Events_], iclass;
      } fVarn_1;

      // dynamically allocated (events x variables) matrix used for the input
      // variables of the training and test samples
      class VARn2 {
      public:
         VARn2() : fxx(nullptr), fNevt(0), fNvar(0) {}
         ~VARn2() { Delete(); }

         // allocate an (nevt+1) x (nvar+1) matrix (1-based indexing)
         void Create( Int_t nevt, Int_t nvar ) {
            fNevt = nevt+1; fNvar = nvar+1;
            fxx = new Double_t*[fNevt];
            for (Int_t i=0; i<fNevt; i++) fxx[i] = new Double_t[fNvar];
         }
         Double_t operator=( Double_t val ) { return val; }
         Double_t& operator()( Int_t ievt, Int_t ivar ) const {
            if (fxx && ievt < fNevt && ivar < fNvar) return fxx[ievt][ivar];
            printf( "*** ERROR in varn3_(): fxx is zero pointer ==> abort ***\n" );
            std::exit(1);
            return fxx[0][0];
         }
         void Delete( void ) {
            if (fxx) for (Int_t i=0; i<fNevt; i++) if (fxx[i]) delete [] fxx[i];
            delete[] fxx;
            fxx = nullptr;
         }

         Double_t** fxx;
         Int_t fNevt;
         Int_t fNvar;
      } fVarn2_1, fVarn3_1;
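
      // Illustrative usage sketch (an assumption about how the translated code
      // fills these matrices, using the 1-based indices implied by Create()):
      //
      //    fVarn2_1.Create( nevt, nvar );                 // training sample
      //    fVarn2_1( ievt+1, ivar+1 ) = normalisedValue;  // write one variable
      //    Double_t v = fVarn2_1( ievt+1, ivar+1 );       // read it back
      //    fVarn2_1.Delete();                             // free the matrix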

      // network state: per-layer node counts (neuron), node values (x, y, o),
      // weights (w), bias weights (ww) and their updates (deltaww)
      struct {
         Double_t x[max_nLayers_*max_nNodes_];
         Double_t y[max_nLayers_*max_nNodes_];
         Double_t o[max_nNodes_];
         Double_t w[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t ww[max_nLayers_*max_nNodes_];
         Double_t cut[max_nNodes_];
         Double_t deltaww[max_nLayers_*max_nNodes_];
         Int_t neuron[max_nLayers_];
      } fNeur_1;

      // back-propagation working buffers (gradients and weight updates)
      struct {
         Double_t coef[max_nNodes_], temp[max_nLayers_], demin, demax;
         Double_t del[max_nLayers_*max_nNodes_];
         Double_t delw[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t delta[max_nLayers_*max_nNodes_*max_nNodes_];
         Double_t delww[max_nLayers_*max_nNodes_];
         Int_t idde;
      } fDel_1;

      // cost-function bookkeeping
      struct {
         Double_t ancout, tolcou;
         Int_t ieps;
      } fCost_1;

      void SetLogger(MsgLogger *l) { fLogger = l; }

   private:

      MsgLogger* fLogger;

      // return the message logger, creating a default one if none was set
      MsgLogger& ULog() {
         if (fLogger) return *fLogger;
         return *(fLogger = new MsgLogger("CFMLP_Utils"));
      }

   public:

      ClassDef(MethodCFMlpANN_Utils,0);   // Implementation of Clermont-Ferrand artificial neural network

   };

} // namespace TMVA

#endif