FEDRA emulsion software from the OPERA Collaboration
EdbShowAlg_N3 Class Reference

#include <EdbShowAlg_NN.h>

Inheritance diagram for EdbShowAlg_N3:
Collaboration diagram for EdbShowAlg_N3:

Public Member Functions

 ClassDef (EdbShowAlg_N3, 1)
 
TMultiLayerPerceptron * Create_NN_ALG_MLP (TTree *inputtree, Int_t inputneurons)
 
void CreateANNTree ()
 
 EdbShowAlg_N3 (Bool_t ANN_DoTrain)
 
void Execute ()
 
void Finalize ()
 
TString GetWeightFileString ()
 
void Init ()
 
void Initialize ()
 
void LoadANNWeights ()
 
void LoadANNWeights (TMultiLayerPerceptron *TMlpANN, TString WeightFileString)
 
void Print ()
 
void SetANNWeightString ()
 
void SetWeightFileString (TString WeightFileString)
 
virtual ~EdbShowAlg_N3 ()
 
- Public Member Functions inherited from EdbShowAlg
void AddRecoShowerArray (EdbShowerP *shower)
 
 ClassDef (EdbShowAlg, 1)
 
Double_t DeltaR_NoPropagation (EdbSegP *s, EdbSegP *stest)
 
Double_t DeltaR_WithoutPropagation (EdbSegP *s, EdbSegP *stest)
 
Double_t DeltaR_WithPropagation (EdbSegP *s, EdbSegP *stest)
 
Double_t DeltaTheta (EdbSegP *s1, EdbSegP *s2)
 
Double_t DeltaThetaComponentwise (EdbSegP *s1, EdbSegP *s2)
 
Double_t DeltaThetaSingleAngles (EdbSegP *s1, EdbSegP *s2)
 
 EdbShowAlg ()
 
 EdbShowAlg (TString AlgName, Int_t AlgValue)
 
virtual void Execute ()
 
virtual void Finalize ()
 
TString GetAlgName () const
 
Int_t GetAlgValue () const
 
Double_t GetMinimumDist (EdbSegP *seg1, EdbSegP *seg2)
 
TObjArray * GetRecoShowerArray () const
 
Int_t GetRecoShowerArrayN () const
 
EdbShowerP * GetShower (Int_t i) const
 
Double_t GetSpatialDist (EdbSegP *s1, EdbSegP *s2)
 
void Help ()
 
virtual void Initialize ()
 
Bool_t IsInConeTube (EdbSegP *sTest, EdbSegP *sStart, Double_t CylinderRadius, Double_t ConeAngle)
 
void Print ()
 
void PrintAll ()
 
void PrintMore ()
 
void PrintParameters ()
 
void PrintParametersShort ()
 
void SetActualAlgParameterset (Int_t ActualAlgParametersetNr)
 
void SetEdbPVRec (EdbPVRec *Ali)
 
void SetEdbPVRecPIDNumbers (Int_t FirstPlate_eAliPID, Int_t LastPlate_eAliPID, Int_t MiddlePlate_eAliPID, Int_t NumberPlate_eAliPID)
 
void SetInBTArray (TObjArray *InBTArray)
 
void SetParameter (Int_t parNr, Float_t par)
 
void SetParameters (Float_t *par)
 
void SetRecoShowerArray (TObjArray *RecoShowerArray)
 
void SetRecoShowerArrayN (Int_t RecoShowerArrayN)
 
void SetUseAliSub (Bool_t UseAliSub)
 
void Transform_eAli (EdbSegP *InitiatorBT, Float_t ExtractSize)
 
void UpdateShowerIDs ()
 
void UpdateShowerMetaData ()
 
virtual ~EdbShowAlg ()
 

Public Attributes

TString eLayout
 

Private Attributes

Bool_t eANN_DoTrain =kTRUE
 
Int_t eANN_EQUALIZESGBG
 
Int_t eANN_INPUTNEURONS
 
Int_t eANN_Inputtype
 
Double_t eANN_Inputvar [24]
 
Int_t eANN_NHIDDENLAYER
 
Int_t eANN_NTRAINEPOCHS
 
Double_t eANN_OUTPUTTHRESHOLD
 
Double_t eANN_OutputValue =0
 
Int_t eANN_PLATE_DELTANMAX
 
TFile * eANNTrainingsTreeFile
 
TTree * eANNTree
 
TMultiLayerPerceptron * eTMlpANN
 
TString eWeightFileLayoutString
 
TString eWeightFileString
 

Additional Inherited Members

- Protected Member Functions inherited from EdbShowAlg
void Set0 ()
 
- Protected Attributes inherited from EdbShowAlg
Int_t eActualAlgParametersetNr
 
TString eAlgName
 
Int_t eAlgValue
 
EdbPVRec * eAli
 
EdbPVRec * eAli_Sub
 
Int_t eAli_SubNpat
 
Int_t eAliNpat
 
Int_t eFirstPlate_eAliPID
 
TObjArray * eInBTArray
 
Int_t eInBTArrayN
 
Int_t eLastPlate_eAliPID
 
Int_t eMiddlePlate_eAliPID
 
Int_t eNumberPlate_eAliPID
 
Int_t eParaN
 
TString eParaString [10]
 
Float_t eParaValue [10]
 
EdbShowerP * eRecoShower
 
TObjArray * eRecoShowerArray
 
Int_t eRecoShowerArrayN
 
Int_t eUseAliSub
 

Constructor & Destructor Documentation

◆ EdbShowAlg_N3()

EdbShowAlg_N3::EdbShowAlg_N3 ( Bool_t  ANN_DoTrain)
752{
753 // Constructor with Train/Run Switch
754 Log(2,"EdbShowAlg_N3::EdbShowAlg_N3","Default Constructor ANN_DoTrain=%d",ANN_DoTrain);
755
756 // Reset all:
757 // Set0() is inherited from the base class, so some values must be reset to NULL
758 // manually, unless a dedicated Set0() function is implemented -- which it is not
759 // at the moment:
760 Set0();
761
766
767 // see default.par_SHOWREC for labeling (labeling identical with ShowRec program)
768 eAlgName="N3";
769 eAlgValue=11;
770
771 // Mostly it will be running, but it can also be set here:
772 eANN_DoTrain=ANN_DoTrain;
773
774 // Init with values according to N3 Alg:
775 Init();
776
777 Log(2,"EdbShowAlg_N3::EdbShowAlg_N3","Default Constructor ANN_DoTrain=%d...done.",ANN_DoTrain);
778}
bool Log(int level, const char *location, const char *fmt,...)
Definition: EdbLog.cxx:75
TString eWeightFileString
Definition: EdbShowAlg_NN.h:149
TString eWeightFileLayoutString
Definition: EdbShowAlg_NN.h:150
TMultiLayerPerceptron * eTMlpANN
Definition: EdbShowAlg_NN.h:152
Bool_t eANN_DoTrain
Definition: EdbShowAlg_NN.h:156
void Init()
Definition: EdbShowAlg_NN.cxx:796
TTree * eANNTree
Definition: EdbShowAlg_NN.h:151
TString eAlgName
Definition: EdbShowAlg.h:49
void Set0()
Definition: EdbShowAlg.cxx:53
Int_t eAlgValue
Definition: EdbShowAlg.h:50
#define NULL
Definition: nidaqmx.h:84
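
A minimal usage sketch of the class, assembled from the members documented on this page. The driver function name RunN3 and the plate index values are illustrative assumptions; the EdbPVRec volume and the TObjArray of initiator basetracks are expected to come from the usual FEDRA reconstruction chain.

#include <cstdio>
#include "TObjArray.h"
#include "EdbShowAlg_NN.h"

// Hypothetical driver: reconstruct showers with the N3 algorithm in run mode.
void RunN3(EdbPVRec* ali, TObjArray* inBTArray)
{
  EdbShowAlg_N3* alg = new EdbShowAlg_N3(kFALSE);  // kFALSE = run mode, no ANN training
  alg->SetEdbPVRec(ali);                           // reconstructed volume
  alg->SetInBTArray(inBTArray);                    // initiator basetracks
  alg->SetEdbPVRecPIDNumbers(0, 9, 4, 10);         // first, last, middle, number of plates (illustrative)
  alg->Initialize();
  alg->Execute();                                  // main shower reconstruction
  alg->Finalize();
  TObjArray* showers = alg->GetRecoShowerArray();  // reconstructed EdbShowerP objects
  printf("Reconstructed %d shower candidates\n", showers ? showers->GetEntries() : 0);
}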

◆ ~EdbShowAlg_N3()

EdbShowAlg_N3::~EdbShowAlg_N3 ( )
virtual
785{
786 // Default Destructor
787 Log(2,"EdbShowAlg_N3::~EdbShowAlg_N3","Default Destructor");
788 if (eANNTree) {
789 delete eANNTree;
790 eANNTree=0;
791 }
792}

Member Function Documentation

◆ ClassDef()

EdbShowAlg_N3::ClassDef ( EdbShowAlg_N3, 1 )

◆ Create_NN_ALG_MLP()

TMultiLayerPerceptron * EdbShowAlg_N3::Create_NN_ALG_MLP ( TTree *  inputtree,
Int_t  inputneurons 
)
892{
893 Log(2,"EdbShowAlg_N3::Create_NN_ALG_MLP","Create_NN_ALG_MLP().");
894
895 if (gEDBDEBUGLEVEL>2) cout << "EdbShowAlg_N3::Create_NN_ALG_MLP() inputneurons= " << inputneurons << endl;
896
897 if ( NULL == simu ) {
898 cout << "EdbShowAlg_N3::Create_NN_ALG_MLP() WARNING simu tree is NULL pointer. Return NULL."<< endl;
899 return NULL;
900 }
901
902 // DEBUG START
903 // THIS IS TO BE WRITTEN BETTER, CAUSE THE HANDOVER OF THE PARAMETERS SHOULD
904 // BE BETTER .....
907
908 // TO DO HERE.... TAKE OVER THE CORRECT LAYOUT.
909 // only knowledge of the number of input neurons and hidden layers is needed.
910 cout << "EdbShowAlg_N3::Create_NN_ALG_MLP() eANN_NHIDDENLAYER = " << eANN_NHIDDENLAYER << endl;
911 cout << "EdbShowAlg_N3::Create_NN_ALG_MLP() eANN_INPUTNEURONS = " << eANN_INPUTNEURONS << endl;
912
913 // Create the layout here:
914 TString layout="";
915 TString newstring="";
916 // ANN Input Layer
917 for (Int_t loop=0; loop<eANN_INPUTNEURONS-1; ++loop) {
918 newstring=Form("eANN_Inputvar[%d],",loop);
919 layout += newstring; // "+" works only with TStrings!
920 }
921 newstring=Form("eANN_Inputvar[%d]:",eANN_INPUTNEURONS-1);
923 layout += newstring;
924 // Hidden Layers
925 for (Int_t loop=0; loop<eANN_NHIDDENLAYER; ++loop) {
926 newstring=Form("%d:",eANN_INPUTNEURONS);
928 layout += newstring;
929 }
930 // Output Layer, one output neuron
931 newstring="eANN_Inputtype";
932 layout += newstring;
933
934 // Set Layout String as internal variable now:
935 eLayout = layout;
936
937 // DEBUG MESSAGES:
938 cout << "simu = " << simu << endl;
939 cout << "simu->Show(0): " << endl;
940 simu->Show(0);
941
942 // Create the network:
943 TMultiLayerPerceptron* TMlpANN = new TMultiLayerPerceptron(layout,simu);
944
945 if (gEDBDEBUGLEVEL>2) {
946 cout << "EdbShowAlg_N3::Create_NN_ALG_MLP() GetStructure: " << endl;
947 cout << TMlpANN->GetStructure() << endl;
948 }
949 Log(2,"EdbShowAlg_N3::Create_NN_ALG_MLP","Create_NN_ALG_MLP()...done.");
950 return TMlpANN;
951}
TMultiLayerPerceptron * TMlpANN
Definition: ShowRec.h:340
Int_t eANN_NHIDDENLAYER
Definition: EdbShowAlg_NN.h:174
TString eLayout
Definition: EdbShowAlg_NN.h:192
Int_t eANN_INPUTNEURONS
Definition: EdbShowAlg_NN.h:181
Float_t eParaValue[10]
Definition: EdbShowAlg.h:52
gEDBDEBUGLEVEL
Definition: energy.C:7
TTree * simu
Definition: testBGReduction_By_ANN.C:12
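
The layout string assembled above follows the TMultiLayerPerceptron convention "input1,...,inputN:hidden1:...:output". A standalone sketch of the same construction is given below; the function name BuildToyMLP and the default values are illustrative, and the tree passed in must already hold branches named eANN_Inputvar and eANN_Inputtype, as produced by CreateANNTree().

#include "TTree.h"
#include "TString.h"
#include "TMultiLayerPerceptron.h"

// Hypothetical helper: build the same layout string as Create_NN_ALG_MLP()
// and create a network on a tree that carries the corresponding branches.
TMultiLayerPerceptron* BuildToyMLP(TTree* simu, Int_t nInput = 4, Int_t nHidden = 2)
{
  TString layout = "";
  for (Int_t i = 0; i < nInput - 1; ++i) layout += Form("eANN_Inputvar[%d],", i);
  layout += Form("eANN_Inputvar[%d]:", nInput - 1);                  // input layer
  for (Int_t i = 0; i < nHidden; ++i) layout += Form("%d:", nInput); // hidden layers, nInput neurons each
  layout += "eANN_Inputtype";                                        // single output neuron
  // For nInput=4, nHidden=2 this yields:
  // "eANN_Inputvar[0],eANN_Inputvar[1],eANN_Inputvar[2],eANN_Inputvar[3]:4:4:eANN_Inputtype"
  return new TMultiLayerPerceptron(layout, simu);
}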

◆ CreateANNTree()

void EdbShowAlg_N3::CreateANNTree ( )
846{
847 Log(2,"EdbShowAlg_N3::CreateANNTree","CreateANNTree()");
848
849 if (!eANNTree) eANNTree = new TTree("EdbShowAlg_N3_eANNTree", "EdbShowAlg_N3_eANNTree");
850
851 // Variables and things important for neural Network:
852 TTree *simu = new TTree("TreeSignalBackgroundBT", "TreeSignalBackgroundBT");
853 eANNTree->Branch("eANN_Inputvar", eANN_Inputvar, "eANN_Inputvar[24]/D");
854 eANNTree->Branch("eANN_Inputtype", &eANN_Inputtype, "eANN_Inputtype/I");
855
856 eANNTree->Print();
857
858 // Default, maximal settings: connections are looked for in the same plate and
859 // two plates up- and downstream, with 4 input variables per plate,
860 // plus 4 fixed input variables for the BT(i)-to-InBT connection: 4+5*4 = 24
861 /*
862 eANN_PLATE_DELTANMAX=5;
863 eANN_NTRAINEPOCHS=100;
864 eANN_NHIDDENLAYER=5;
865 eANN_EQUALIZESGBG=0;
866 eANN_OUTPUTTHRESHOLD=0.85;
867 eANN_INPUTNEURONS=24;
868 eANN_Inputtype=1;
869 */
870
871 cout << "DEBUG: Fill Tree with DUMMY variables --- TO BE CHANGED LATER" << endl;
872 for (int i=0; i<10; ++i) {
873 for (int l=0; l<24; ++l) {
874 eANN_Inputvar[l]=gRandom->Uniform();
875 }
876 eANN_Inputtype=i%2;
877 eANNTree->Fill();
878 }
879 eANNTree->Show(0);
880 eANNTree->Show(eANNTree->GetEntries()-1);
881 cout << "DEBUG: Fill Tree with DUMMY variables --- TO BE CHANGED LATER DONE.." << endl;
882
883 //---------
884 Log(2,"EdbShowAlg_N3::CreateANNTree","CreateANNTree()...done.");
885 return;
886}
Int_t eANN_Inputtype
Definition: EdbShowAlg_NN.h:158
Double_t eANN_Inputvar[24]
Definition: EdbShowAlg_NN.h:157
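
A compact sketch of building a tree with the same branch layout as above and filling it with dummy entries, mirroring the DEBUG block of CreateANNTree(). The function name MakeToyANNTree is an illustrative assumption.

#include "TTree.h"
#include "TRandom.h"

// Hypothetical helper: a toy signal/background tree with the N3 branch layout.
TTree* MakeToyANNTree()
{
  Double_t inputvar[24];
  Int_t    inputtype = 0;
  TTree* tree = new TTree("EdbShowAlg_N3_eANNTree", "EdbShowAlg_N3_eANNTree");
  tree->Branch("eANN_Inputvar", inputvar, "eANN_Inputvar[24]/D");
  tree->Branch("eANN_Inputtype", &inputtype, "eANN_Inputtype/I");
  for (Int_t i = 0; i < 10; ++i) {
    for (Int_t l = 0; l < 24; ++l) inputvar[l] = gRandom->Uniform();  // dummy input variables
    inputtype = i % 2;                                                // alternate background (0) / signal (1) labels
    tree->Fill();
  }
  tree->ResetBranchAddresses();  // detach the local buffers before handing the tree out
  return tree;
}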

◆ Execute()

void EdbShowAlg_N3::Execute ( )
virtual

Reimplemented from EdbShowAlg.

1023{
1024 Log(2,"EdbShowAlg_N3::Execute","DOING MAIN SHOWER RECONSTRUCTION HERE");
1025
1026 if (eInBTArrayN==0) {
1027 Log(2,"EdbShowAlg_N3::Execute","Warning: No InitiatorBTs in the array. Return now");
1028 return;
1029 }
1030
1031 // TO BE DONE HERE:
1032 // FILL THE ROUTINE WITH THE CODE FROM ShowReco PROGRAM
1033 cout << "EdbShowAlg_N3::Execute()...FILL THE ROUTINE WITH THE CODE FROM ShowReco PROGRAM." << endl;
1034
1035
1036 // Create the root file that contains the training tree data first,
1037 // otherwise the trees are not connected with the specified file.
1038 if (eANN_DoTrain==kTRUE) {
1039 eANNTrainingsTreeFile = new TFile(Form("N3_ANN_TrainingsTreeFile.root",0),"RECREATE");
1040 }
1041
1042 // Variables and things important for neural Network:
1043 TTree *eANNTrainingsTree = new TTree("TreeSignalBackgroundBT", "TreeSignalBackgroundBT");
1044 eANNTrainingsTree->Branch("N3_Type", &eANN_Inputtype, "N3_Type/I");
1045 eANNTrainingsTree->Branch("N3_Inputvar", eANN_Inputvar, "N3_Inputvar[24]/D");
1046
1047
1048
1049 EdbSegP* InBT;
1050 EdbSegP* Segment;
1051 EdbSegP* seg;
1052 EdbShowerP* RecoShower;
1053
1054 Bool_t StillToLoop=kTRUE;
1055 Int_t ActualPID;
1056 Int_t newActualPID;
1057 Int_t STEP=-1;
1058 Int_t NLoopedPattern=0;
1060 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- STEP for patternloop direction = " << STEP << endl;
1061
1062
1063 Double_t params[30]; // Used for ANN Evaluation // TO BE ADAPTED!!!
1064
1065 //--- Loop over InBTs:
1066 cout << "Loop over InBTs N=" << eInBTArrayN << endl;
1067
1068 // Since eInBTArray is filled in ascending order by z position,
1069 // we use the descending loop to begin with the BT with the lowest z first.
1070 for (Int_t i=eInBTArrayN-1; i>=0; --i) {
1071
1072 // CounterOutPut
1073 if (gEDBDEBUGLEVEL==2) if ((i%1)==0) cout << eInBTArrayN <<" InBT in total, still to do:"<<Form("%4d",i)<< "\r\r\r\r"<<flush;
1074
1075 //-----------------------------------
1076 // 0a) Reset characteristic variables:
1077 //-----------------------------------
1078
1079 //-----------------------------------
1080 // 1) Make eAli with cut parameters:
1081 //-----------------------------------
1082
1083 // Create new EdbShowerP object for storage;
1084 // see EdbShowerP for why the constructor has to be called with a "unique" identifiable value
1085 //RecoShower = new EdbShowerP(i,eAlgValue);
1086 RecoShower = new EdbShowerP(i,eAlgValue,-1);
1087
1088 // Get InitiatorBT from eInBTArray
1089 InBT=(EdbSegP*)eInBTArray->At(i);
1090 if (gEDBDEBUGLEVEL>2) InBT->PrintNice();
1091
1092 // Clone InBT, because it is modified many times;
1093 // this avoids rounding errors from propagating back and forth
1094 EdbSegP* InBTClone = (EdbSegP*)InBT->Clone();
1095
1096 // Add InBT to RecoShower:
1097 // This has to be done, since by definition the first BT in the RecoShower is the InBT.
1098 // Otherwise, also the definition of shower axis and transversal profiles is wrong!
1099 RecoShower -> AddSegment(InBT);
1100 cout << "Segment (InBT) " << InBT << " was added to RecoShower." << endl;
1101
1102 // Transform (make size smaller, extract only events having same MC) the eAli object:
1103 // See in Execute_CA for description.
1104 // Transform_eAli(InBT,1400);
1105 Transform_eAli(InBT,2400);
1106
1107 //-----------------------------------
1108 // 2) Loop over (whole) eAli, check BT for Cuts
1109 //-----------------------------------
1110 ActualPID= InBT->PID() ;
1111 newActualPID= InBT->PID() ;
1112
1113 while (StillToLoop) {
1114 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- --- Doing patterloop " << ActualPID << endl;
1115
1116 // just to adapt to this nomenclature of ShowRec program:
1117 int patterloop_cnt=ActualPID;
1118
1119 for (Int_t btloop_cnt=0; btloop_cnt<eAli->GetPattern(ActualPID)->GetN(); ++btloop_cnt) {
1120
1121 Segment = (EdbSegP*)eAli->GetPattern(ActualPID)->GetSegment(btloop_cnt);
1122
1123 // just to adapt to this nomenclature of ShowRec program
1124 seg=Segment;
1125
1126 if (gEDBDEBUGLEVEL>4) Segment->PrintNice();
1127
1128 // Reset characteristic variables:
1129 for (Int_t loop=0; loop<24; ++loop) eANN_Inputvar[loop]=0;
1130
1131 // Now calculate NN Inputvariables: --------------------
1132 // Calculate the first four inputvariables, which depend on InitiatorBT only:
1133 // Important: dT, dMindist is symmetric, dR is NOT!
1134 // Do propagation from InBT to seg! (i.e. to downstream segments)
1135 // Loose PreCuts, in order not to get too much BG BT into trainings sample
1136 eANN_Inputvar[0] = seg->Z()-InBT->Z();
1137 // Update: It is better to use DistToAxis instead of dR, since dR measures
1138 // only distance to InBT without taking care of the direction.
1139 // (does not matter for relatively straight tracks, but for tracks in direction)
1140 eANN_Inputvar[1] = GetdR(InBT, seg);
1141 //eANN_Inputvar[1] = GetDistToAxis(InBTClone, seg);
1142 cout << "// TO BE CHECKED WHERE THE FUNCTION GetDistToAxis(InBTClone, seg); IS!!" << endl;
1143 // TO BE CHECKED WHERE THE FUNCTION GetDistToAxis(InBTClone, seg); IS!!
1144 if (eANN_DoTrain==kTRUE && eANN_Inputvar[1] > 1200) continue;
1145
1146 //eANN_Inputvar[2] = GetdeltaThetaSingleAngles(InBT, seg);
1147 // TO BE CHECKED WHERE THE FUNCTION GetdeltaThetaSingleAngles(InBT, seg); IS!!
1148 cout << "// TO BE CHECKED WHERE THE FUNCTION GetdeltaThetaSingleAngles(InBT, seg); IS!!" << endl;
1149 if (eANN_DoTrain==kTRUE && eANN_Inputvar[2] > 0.4) continue;
1150
1151 //eANN_Inputvar[3] = GetdMinDist(InBT, seg);
1152 // TO BE CHECKED WHERE THE FUNCTION GetdMinDist(InBT, seg); IS!!
1153 cout << "// TO BE CHECKED WHERE THE FUNCTION GetdMinDist(InBT, seg); IS!!" << endl;
1154 if (eANN_DoTrain==kTRUE && eANN_Inputvar[3] > 800) continue;
1155 // 1)
1156 // 2) .... // 24)
1157 // TO BE FILLED WITH THE CODE FROM SHOWREC PROGRAMM
1158 // end of calculate NN Inputvariables: --------------------
1159
1160 // ---------------------------------------------------------
1161 // Calculate eANN Output now:
1163 // Adapt: the params array should have as many entries as there are input variables.
1164 // This array is larger than the array actually used for evaluation.
1165 // The array with the right size is created once the eANN_INPUTNEURONS
1166 // variable is fixed (the TMLP class demands #arraysize = #inputneurons).
1167 // This is a kind of dumb workaround, but for now it should work.
1168 Double_t EvalValue=0;
1169 Double_t N3_Evalvar4[4];
1170 Double_t N3_Evalvar8[8];
1171 Double_t N3_Evalvar12[12];
1172 Double_t N3_Evalvar16[16];
1173 Double_t N3_Evalvar20[20];
1174 Double_t N3_Evalvar24[24];
1175
1176 if (eANN_INPUTNEURONS==4) {
1177 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar4[k]= eANN_Inputvar[k];
1178 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar4);
1179 }
1180 else if (eANN_INPUTNEURONS==8) {
1181 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar8[k]= eANN_Inputvar[k];
1182 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar8);
1183 }
1184 else if (eANN_INPUTNEURONS==12) {
1185 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar12[k]= eANN_Inputvar[k];
1186 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar12);
1187 }
1188 else if (eANN_INPUTNEURONS== 16) {
1189 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar16[k]= eANN_Inputvar[k];
1190 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar16);
1191 }
1192 else if (eANN_INPUTNEURONS==20) {
1193 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar20[k]= eANN_Inputvar[k];
1194 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar20);
1195 }
1196 else {
1197 for (int k=0; k<eANN_INPUTNEURONS; ++k) N3_Evalvar24[k]= eANN_Inputvar[k];
1198 eANN_OutputValue=eTMlpANN->Evaluate(0,N3_Evalvar24);
1199 }
1200 // ---------------------------------------------------------
1201
1202
1203 // Now apply cut conditions: NN Neural Network Alg --------------------
1204 Double_t value=0;
1205
1206 // These conditions have to be calculated, still!!!
1207 cout << "TO BE DONE" << endl;
1208
1209 value=eTMlpANN->Evaluate(0, params);
1210 if (gEDBDEBUGLEVEL>3) {
1211 cout << "eANN_OutputValue: " << eANN_OutputValue << " Inputvalues: ";
1212 for (int i=0; i<5; i++) cout << " " << eANN_Inputvar[i];
1213 }
1214 if (eANN_OutputValue<eParaValue[1]) continue;
1215 // end of cut conditions: NN Neural Network Alg --------------------
1216
1217
1218
1219 // If we arrive here, Basetrack Segment has passed criteria
1220 // and is then added to the RecoShower:
1221 // Check if its not the InBT which is already added:
1222 if (Segment->X()==InBT->X()&&Segment->Y()==InBT->Y()) {
1223 ; // is InBT, do nothing;
1224 }
1225 else {
1226 RecoShower -> AddSegment(Segment);
1227 }
1228 cout << "Segment " << Segment << " was added to &RecoShower : " << &RecoShower << endl;
1229 } // of btloop_cnt
1230
1231 //------------
1232 newActualPID=ActualPID+STEP;
1233 ++NLoopedPattern;
1234
1235 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- --- newActualPID= " << newActualPID << endl;
1236 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- --- NLoopedPattern= " << NLoopedPattern << endl;
1237 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- --- eNumberPlate_eAliPID= " << eNumberPlate_eAliPID << endl;
1238 if (gEDBDEBUGLEVEL>3) cout << "EdbShowAlg_N3::Execute--- --- StillToLoop= " << StillToLoop << endl;
1239
1240 // This if holds in the case of STEP== +1
1241 if (STEP==1) {
1242 if (newActualPID>eLastPlate_eAliPID) StillToLoop=kFALSE;
1243 if (newActualPID>eLastPlate_eAliPID) cout << "EdbShowAlg_N3::Execute--- ---Stopp Loop since: newActualPID>eLastPlate_eAliPID"<<endl;
1244 }
1245 // This if holds in the case of STEP== -1
1246 if (STEP==-1) {
1247 if (newActualPID<eLastPlate_eAliPID) StillToLoop=kFALSE;
1248 if (newActualPID<eLastPlate_eAliPID) cout << "EdbShowAlg_N3::Execute--- ---Stopp Loop since: newActualPID<eLastPlate_eAliPID"<<endl;
1249 }
1250 // This if holds in general, since eNumberPlate_eAliPID does not depend on the structure of the gAli object:
1251 if (NLoopedPattern>eNumberPlate_eAliPID) StillToLoop=kFALSE;
1252 if (NLoopedPattern>eNumberPlate_eAliPID) cout << "EdbShowAlg_N3::Execute--- ---Stopp Loop since: NLoopedPattern>eNumberPlate_eAliPID"<<endl;
1253
1254 ActualPID=newActualPID;
1255 } // of // while (StillToLoop)
1256
1257 // Obligatory when Shower Reconstruction is finished!
1258 RecoShower ->Update();
1259 //RecoShower ->PrintBasics();
1260
1261
1262 // Add Shower Object to Shower Reco Array.
1263 // Not if it is empty,
1264 // and not if it contains only one BT:
1265 if (RecoShower->N()>1) eRecoShowerArray->Add(RecoShower);
1266
1267 // Set back loop values:
1268 StillToLoop=kTRUE;
1269 NLoopedPattern=0;
1270 } // of // for (Int_t i=eInBTArrayN-1; i>=0; --i) {
1271
1272
1273 // Set new value for eRecoShowerArrayN (may now be < eInBTArrayN).
1275
1276 Log(2,"EdbShowAlg_N3::Execute","eRecoShowerArray():Entries = %d",eRecoShowerArray->GetEntries());
1277 Log(2,"EdbShowAlg_N3::Execute","DOING MAIN SHOWER RECONSTRUCTION HERE...done.");
1278 return;
1279}
EdbPattern * GetPattern(int id) const
Definition: EdbPattern.cxx:1721
Definition: EdbSegP.h:21
Float_t X() const
Definition: EdbSegP.h:173
Float_t Z() const
Definition: EdbSegP.h:153
Float_t Y() const
Definition: EdbSegP.h:174
void PrintNice() const
Definition: EdbSegP.cxx:392
Int_t PID() const
Definition: EdbSegP.h:148
EdbSegP * GetSegment(int i) const
Definition: EdbPattern.h:66
TFile * eANNTrainingsTreeFile
Definition: EdbShowAlg_NN.h:153
Double_t eANN_OutputValue
Definition: EdbShowAlg_NN.h:159
void Transform_eAli(EdbSegP *InitiatorBT, Float_t ExtractSize)
Definition: EdbShowAlg.cxx:107
void SetRecoShowerArrayN(Int_t RecoShowerArrayN)
Definition: EdbShowAlg.h:117
TObjArray * eRecoShowerArray
Definition: EdbShowAlg.h:70
Int_t eFirstPlate_eAliPID
Definition: EdbShowAlg.h:65
Int_t eLastPlate_eAliPID
Definition: EdbShowAlg.h:66
Int_t eInBTArrayN
Definition: EdbShowAlg.h:63
TObjArray * eInBTArray
Definition: EdbShowAlg.h:62
EdbPVRec * eAli
Definition: EdbShowAlg.h:59
Int_t eNumberPlate_eAliPID
Definition: EdbShowAlg.h:68
Definition: EdbShowerP.h:28
Int_t N() const
Definition: EdbShowerP.h:412
void Update()
Definition: EdbShowerP.cxx:975
Double_t params[3]
Definition: testBGReduction_By_ANN.C:84
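
The per-segment decision in Execute() boils down to copying the first eANN_INPUTNEURONS input variables into a buffer of matching size and comparing the network output to a threshold. A minimal sketch of that step, assuming an already trained TMultiLayerPerceptron; the function name AcceptBasetrack and the default cut value are illustrative.

#include "TMultiLayerPerceptron.h"

// Hypothetical helper: evaluate one basetrack candidate against the trained net.
Bool_t AcceptBasetrack(TMultiLayerPerceptron* mlp, const Double_t* inputvar,
                       Int_t nInput, Double_t threshold = 0.8)
{
  // TMultiLayerPerceptron::Evaluate expects an array whose size matches the
  // number of input neurons, hence the copy into a local buffer.
  Double_t buf[24];                                            // 24 is the maximal layout used here
  for (Int_t k = 0; k < nInput && k < 24; ++k) buf[k] = inputvar[k];
  Double_t out = mlp->Evaluate(0, buf);                        // value of the single output neuron
  return out >= threshold;                                     // keep the segment if above threshold
}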

◆ Finalize()

void EdbShowAlg_N3::Finalize ( )
virtual

Reimplemented from EdbShowAlg.

1292{
1293 Log(2,"EdbShowAlg_N3::Finalize","Finalize()");
1294 cout << "TO BE DONE HERE: DELETE THE UNNECESSARY OBJECTS CREATED ON THE HEAP..." << endl;
1295 return;
1296}

◆ GetWeightFileString()

TString EdbShowAlg_N3::GetWeightFileString ( )
inline
203 {
204 return eWeightFileString;
205 }

◆ Init()

void EdbShowAlg_N3::Init ( void  )
797{
798 Log(2,"EdbShowAlg_N3::EdbShowAlg_N3","Init()");
799
800 Log(2,"EdbShowAlg_N3::EdbShowAlg_N3","Init with values according to N3 Alg TO BE CHECKED !!");
801 // Init with values according to N3 Alg:
802 // TO BE CHECKED !!
803 // Structure should be equal to the one in file:
804 // PARAMETERSET_DEFINITIONFILE_N3_ALG.root
805 eParaValue[0]=5;
806 eParaString[0]="ANN_PLATE_DELTANMAX";
807 eParaValue[1]=100;
808 eParaString[1]="ANN_NTRAINEPOCHS";
809 eParaValue[2]=7;
810 eParaString[2]="ANN_NHIDDENLAYER";
811 eParaValue[3]=0.8;
812 eParaString[3]="ANN_OUTPUTTHRESHOLD";
813 eParaValue[4]=0;
814 eParaString[4]="ANN_EQUALIZESGBG";
815 eParaValue[5]=24;
816 eParaString[5]="N3_ANN_INPUTNEURONS";
817
818 cout << "DEBUG::AGAIN, WHERE ARE THE ePARAVALUES SET???????" << endl;
819
820 eWeightFileString="weightsN3.txt";
821 eWeightFileLayoutString="layoutN3";
822
823 // Create Tree where the Variables for the N3 Neural Net are stored:
826
827 // Standard Weights:
830
831 return;
832}
void SetANNWeightString()
Definition: EdbShowAlg_NN.cxx:956
TMultiLayerPerceptron * Create_NN_ALG_MLP(TTree *inputtree, Int_t inputneurons)
Definition: EdbShowAlg_NN.cxx:891
void LoadANNWeights()
Definition: EdbShowAlg_NN.cxx:974
void CreateANNTree()
Definition: EdbShowAlg_NN.cxx:845
TString eParaString[10]
Definition: EdbShowAlg.h:53
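
The defaults above can be overridden through the parameter interface inherited from EdbShowAlg. A short sketch, with index/value pairs following the eParaString ordering listed in Init(); the configuration function name and the alternative weight file name are hypothetical examples.

#include "EdbShowAlg_NN.h"

// Hypothetical configuration snippet: tighten the output threshold and use a custom weight file.
void ConfigureN3(EdbShowAlg_N3* alg)
{
  alg->SetParameter(0, 3);                            // ANN_PLATE_DELTANMAX
  alg->SetParameter(3, 0.85);                         // ANN_OUTPUTTHRESHOLD
  alg->SetWeightFileString("weightsN3_custom.txt");   // hypothetical alternative weight file
  alg->LoadANNWeights();                              // reload the weights from that file
}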

◆ Initialize()

void EdbShowAlg_N3::Initialize ( )
virtual

Reimplemented from EdbShowAlg.

838{
839 Log(2,"EdbShowAlg_N3::EdbShowAlg_N3","Initialize()");
840 return;
841}

◆ LoadANNWeights() [1/2]

void EdbShowAlg_N3::LoadANNWeights ( )
975{
976 if (eWeightFileString=="") {
977 cout << "EdbShowAlg_N3::SetANNWeightString IS EMPTY. Reset to default string!" << endl;
978 eWeightFileString="WEIGHTFILESTRING_N3.txt";
979 }
980 eTMlpANN->LoadWeights(eWeightFileString);
981 return;
982}

◆ LoadANNWeights() [2/2]

void EdbShowAlg_N3::LoadANNWeights ( TMultiLayerPerceptron *  TMlpANN,
TString  WeightFileString 
)
989{
990 TMlpANN->LoadWeights(WeightFileString);
991 if (gEDBDEBUGLEVEL>2) cout << "EdbShowAlg_N3::LoadANNWeights " << WeightFileString << endl;
992 return;
993}
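
LoadWeights() is the plain ROOT call used by both overloads above; its counterpart for producing a weight file is TMultiLayerPerceptron::DumpWeights after training. A brief sketch, assuming a network built as in Create_NN_ALG_MLP(); the epoch count, training options and function name are illustrative, while the file name matches the default set in Init().

#include "TMultiLayerPerceptron.h"

// Hypothetical training step: train, dump the weights, and reload them later.
void TrainAndReload(TMultiLayerPerceptron* mlp)
{
  mlp->Train(100, "text,update=10");     // 100 epochs, textual progress every 10 epochs
  mlp->DumpWeights("weightsN3.txt");     // write the weights to an ASCII file
  mlp->LoadWeights("weightsN3.txt");     // reload them, as LoadANNWeights() does
}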

◆ Print()

void EdbShowAlg_N3::Print ( )
1000{
1001 Log(2,"EdbShowAlg_N3::Print","Print()");
1002
1003 cout << "Number of Inputvariables: " << eParaValue[5] << endl;
1004 cout << "Algorithm method related inputs:" << endl;
1005 cout << "eANN_PLATE_DELTANMAX " << eParaValue[0] << endl;
1006 cout << "eANN_NTRAINEPOCHS " << eParaValue[1] << endl;
1007 cout << "eANN_NHIDDENLAYER " << eParaValue[2] << endl;
1008 cout << "eANN_OUTPUTTHRESHOLD " << eParaValue[3] << endl;
1009 cout << "eANN_EQUALIZESGBG " << eParaValue[4] << endl;
1010
1011 cout << "Structure of the Net:" << endl;
1012 cout << eLayout.Data() << endl;
1013
1014 Log(2,"EdbShowAlg_N3::Print","Print()...done.");
1015 return;
1016}

◆ SetANNWeightString()

void EdbShowAlg_N3::SetANNWeightString ( )
957{
958 Log(2,"EdbShowAlg_N3::SetANNWeightString","SetANNWeightString()");
959 int inputneurons=eParaValue[5];
960 if (inputneurons==5) eWeightFileString="WEIGHTFILESTRING_N3.txt";
961 // TO DO HERE.... TAKE OVER THE CORRECT WEIGHTFILE STRING.
962 cout << "EdbShowAlg_N3::SetANNWeightString() TO DO HERE.... TAKE OVER THE CORRECT WEIGHTFILE STRING. " << endl;
963
964// if (gEDBDEBUGLEVEL>2)
965 cout << "EdbShowAlg_N3::SetANNWeightString " << eWeightFileString << endl;
966 Log(2,"EdbShowAlg_N3::SetANNWeightString","SetANNWeightString()...done.");
967 return;
968}

◆ SetWeightFileString()

void EdbShowAlg_N3::SetWeightFileString ( TString  WeightFileString)
inline
199 {
200 eWeightFileString=WeightFileString;
201 return;
202 }

Member Data Documentation

◆ eANN_DoTrain

Bool_t EdbShowAlg_N3::eANN_DoTrain =kTRUE
private

◆ eANN_EQUALIZESGBG

Int_t EdbShowAlg_N3::eANN_EQUALIZESGBG
private

◆ eANN_INPUTNEURONS

Int_t EdbShowAlg_N3::eANN_INPUTNEURONS
private

◆ eANN_Inputtype

Int_t EdbShowAlg_N3::eANN_Inputtype
private

◆ eANN_Inputvar

Double_t EdbShowAlg_N3::eANN_Inputvar[24]
private

◆ eANN_NHIDDENLAYER

Int_t EdbShowAlg_N3::eANN_NHIDDENLAYER
private

◆ eANN_NTRAINEPOCHS

Int_t EdbShowAlg_N3::eANN_NTRAINEPOCHS
private

◆ eANN_OUTPUTTHRESHOLD

Double_t EdbShowAlg_N3::eANN_OUTPUTTHRESHOLD
private

◆ eANN_OutputValue

Double_t EdbShowAlg_N3::eANN_OutputValue =0
private

◆ eANN_PLATE_DELTANMAX

Int_t EdbShowAlg_N3::eANN_PLATE_DELTANMAX
private

◆ eANNTrainingsTreeFile

TFile* EdbShowAlg_N3::eANNTrainingsTreeFile
private

◆ eANNTree

TTree* EdbShowAlg_N3::eANNTree
private

◆ eLayout

TString EdbShowAlg_N3::eLayout

◆ eTMlpANN

TMultiLayerPerceptron* EdbShowAlg_N3::eTMlpANN
private

◆ eWeightFileLayoutString

TString EdbShowAlg_N3::eWeightFileLayoutString
private

◆ eWeightFileString

TString EdbShowAlg_N3::eWeightFileString
private

The documentation for this class was generated from the following files: