// @(#)root/mlp:$Id$
// Author: Christophe.Delaere@cern.ch   20/07/03

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

#ifndef ROOT_TMultiLayerPerceptron
#define ROOT_TMultiLayerPerceptron

#include "TObject.h"
#include "TString.h"
#include "TObjArray.h"
#include "TMatrixD.h"
#include "TNeuron.h"

class TTree;
class TEventList;
class TTreeFormula;
class TTreeFormulaManager;

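////////////////////////////////////////////////////////////////////////////////
/// A feed-forward neural network (multi-layer perceptron) trained on events
/// read from a TTree. The topology is described by the layout string
/// "in1,in2,...:H1:H2:...:out1,...": inputs and outputs are TTree formulas
/// separated by commas, hidden layers are given by their number of neurons,
/// and layers are separated by colons. Prefixing an input or output with '@'
/// normalises the corresponding data taken from the TTree.
///
/// Construction sketch (the tree and the branch names x, y and type are
/// hypothetical; even entries train the network, odd entries test it):
///
///     TMultiLayerPerceptron mlp("x,y:8:type", tree,
///                               "Entry$%2==0", "Entry$%2==1");
///     mlp.Train(100, "text");
////////////////////////////////////////////////////////////////////////////////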
class TMultiLayerPerceptron : public TObject {
 friend class TMLPAnalyzer;

 public:
   /// Minimisation methods, selected through SetLearningMethod()
   enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
                          kRibierePolak, kFletcherReeves, kBFGS };
   /// Datasets on which GetError() can be evaluated
   enum EDataSet { kTraining, kTest };
   TMultiLayerPerceptron();
   TMultiLayerPerceptron(const char* layout, TTree* data = nullptr,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data = nullptr,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   ~TMultiLayerPerceptron() override;
   void SetData(TTree*);
   void SetTrainingDataSet(TEventList* train);
   void SetTestDataSet(TEventList* test);
   void SetTrainingDataSet(const char* train);
   void SetTestDataSet(const char* test);
   void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
   void SetEventWeight(const char*);
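   /// Trains the network for nEpoch epochs. Known options include "text"
   /// (print the error at each epoch) and "graph" (display training curves);
   /// a non-zero minE stops the training early once the error falls below it.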
   void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
   Double_t Result(Int_t event, Int_t index = 0) const;  ///< Network output for a given event
   Double_t GetError(Int_t event) const;                 ///< Error on a single event
   Double_t GetError(TMultiLayerPerceptron::EDataSet set) const; ///< Error on a whole dataset
   void ComputeDEDw() const;
   void Randomize() const;
   void SetEta(Double_t eta);
   void SetEpsilon(Double_t eps);
   void SetDelta(Double_t delta);
   void SetEtaDecay(Double_t ed);
   void SetTau(Double_t tau);
   void SetReset(Int_t reset);
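   /// Tuning sketch (illustrative values; mlp as constructed above):
   ///
   ///     mlp.SetLearningMethod(TMultiLayerPerceptron::kBFGS);
   ///     mlp.SetEta(0.05);   // stochastic learning rate
   ///     mlp.SetReset(25);   // steepest-descent reset period, in epochs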
   inline Double_t GetEta()      const { return fEta; }
   inline Double_t GetEpsilon()  const { return fEpsilon; }
   inline Double_t GetDelta()    const { return fDelta; }
   inline Double_t GetEtaDecay() const { return fEtaDecay; }
   TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const { return fLearningMethod; }
   inline Double_t GetTau()      const { return fTau; }
   inline Int_t GetReset()       const { return fReset; }
   inline TString GetStructure() const { return fStructure; }
   inline TNeuron::ENeuronType GetType() const { return fType; }
   void DrawResult(Int_t index = 0, Option_t* option = "test") const;
   Bool_t DumpWeights(Option_t* filename = "-") const;   ///< "-" dumps to standard output
   Bool_t LoadWeights(Option_t* filename = "");
   Double_t Evaluate(Int_t index, Double_t* params) const; ///< Output neuron `index` for the given inputs
   void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
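   /// Deployment sketch (illustrative values): a trained network can be
   /// evaluated in memory or exported as standalone code; besides "C++",
   /// the Export() language can be "Python" or "FORTRAN".
   ///
   ///     Double_t params[2] = {1.2, 3.4};        // hypothetical inputs
   ///     Double_t out = mlp.Evaluate(0, params); // first output neuron
   ///     mlp.Export("NNfunction", "C++");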
   void Draw(Option_t *option="") override;

 protected:
   void AttachData();
   void BuildNetwork();
   void GetEntry(Int_t) const;
   // By design, the learning methods are not const, even though they could be
   void MLP_Stochastic(Double_t*);
   void MLP_Batch(Double_t*);
   Bool_t LineSearch(Double_t*, Double_t*);
   void SteepestDir(Double_t*);
   void ConjugateGradientsDir(Double_t*, Double_t);
   void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);
   bool GetBFGSH(TMatrixD&, TMatrixD&, TMatrixD&);
   void BFGSDir(TMatrixD&, Double_t*);
   Double_t DerivDir(Double_t*);
   Double_t GetCrossEntropyBinary() const;
   Double_t GetCrossEntropy() const;
   Double_t GetSumSquareError() const;

 private:
   TMultiLayerPerceptron(const TMultiLayerPerceptron&); // Not implemented
   TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&); // Not implemented
   void ExpandStructure();
   void BuildFirstLayer(TString&);
   void BuildHiddenLayers(TString&);
   void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                            Int_t& prevStart, Int_t& prevStop,
                            Bool_t lastLayer);
   void BuildLastLayer(TString&, Int_t);
   void Shuffle(Int_t*, Int_t) const;
   void MLP_Line(Double_t*, Double_t*, Double_t);

   TTree* fData;                    ///<! Pointer to the tree used as data source
   Int_t fCurrentTree;              ///<! Index of the current tree in a chain
   Double_t fCurrentTreeWeight;     ///<! Weight of the current tree in a chain
   TObjArray fNetwork;              ///< Collection of all the neurons in the network
   TObjArray fFirstLayer;           ///< Collection of the input neurons; subset of fNetwork
   TObjArray fLastLayer;            ///< Collection of the output neurons; subset of fNetwork
   TObjArray fSynapses;             ///< Collection of all the synapses in the network
   TString fStructure;              ///< String containing the network structure
   TString fWeight;                 ///< String containing the event weight
   TNeuron::ENeuronType fType;      ///< Type of hidden neurons
   TNeuron::ENeuronType fOutType;   ///< Type of output neurons
   TString fextF;                   ///< String containing the function name
   TString fextD;                   ///< String containing the derivative name
   TEventList *fTraining;           ///<! EventList defining the events in the training dataset
   TEventList *fTest;               ///<! EventList defining the events in the test dataset
   ELearningMethod fLearningMethod; ///<! The learning method
   TTreeFormula* fEventWeight;      ///<! Formula representing the event weight
   TTreeFormulaManager* fManager;   ///<! TTreeFormulaManager for the weight and neurons
   Double_t fEta;                   ///<! Eta - used in stochastic minimisation - Default=0.1
   Double_t fEpsilon;               ///<! Epsilon - used in stochastic minimisation - Default=0.
   Double_t fDelta;                 ///<! Delta - used in stochastic minimisation - Default=0.
   Double_t fEtaDecay;              ///<! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
   Double_t fTau;                   ///<! Tau - used in line search - Default=3.
   Double_t fLastAlpha;             ///<! Internal parameter used in line search
   Int_t fReset;                    ///<! Number of epochs between two resets of the search direction to the steepest descent - Default=50
   Bool_t fTrainingOwner;           ///<! Internal flag: whether fTraining must be deleted
   Bool_t fTestOwner;               ///<! Internal flag: whether fTest must be deleted

   ClassDefOverride(TMultiLayerPerceptron, 4) // a Neural Network
};

#endif