[folded-ctf.git] / decision_tree.h

///////////////////////////////////////////////////////////////////////////
// This program is free software: you can redistribute it and/or modify  //
// it under the terms of the version 3 of the GNU General Public License //
// as published by the Free Software Foundation.                         //
//                                                                       //
// This program is distributed in the hope that it will be useful, but   //
// WITHOUT ANY WARRANTY; without even the implied warranty of            //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU      //
// General Public License for more details.                              //
//                                                                       //
// You should have received a copy of the GNU General Public License     //
// along with this program. If not, see <http://www.gnu.org/licenses/>.  //
//                                                                       //
// Written by Francois Fleuret, (C) IDIAP                                //
// Contact <francois.fleuret@idiap.ch> for comments & bug reports        //
///////////////////////////////////////////////////////////////////////////

#ifndef DECISION_TREE_H
#define DECISION_TREE_H

#include "misc.h"
#include "classifier.h"
#include "sample_set.h"
#include "loss_machine.h"

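// DecisionTree is a Classifier that recursively partitions the sample set
// with single-feature threshold tests. From its interface it appears to be
// used as a weak learner in a boosting-style loop: training takes a
// LossMachine and the current responses, and each node carries a constant
// weight that contributes to the response of the samples reaching it.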
class DecisionTree : public Classifier {

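  // Split stored at this node: the index of the feature that is tested and
  // the threshold it is compared against. _weight is presumably the constant
  // response contributed when this node acts as a leaf.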
  int _feature_index;
  scalar_t _threshold;
  scalar_t _weight;

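  // Children for samples whose feature value falls below or above the
  // threshold; presumably null when the node is a leaf.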
  DecisionTree *_subtree_lesser, *_subtree_greater;

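  // A node is split further only if it holds at least this many samples.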
  static const int min_nb_samples_for_split = 5;

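  // Chooses the feature/threshold pair for this node, presumably by scanning
  // candidate splits and scoring them with the provided loss derivatives.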
  void pick_best_split(SampleSet *sample_set,
                       scalar_t *loss_derivatives);

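  // Recursive training: fits this node on the given samples and, when the
  // stopping criteria allow it, creates and trains the two subtrees. The
  // depth argument presumably tracks the recursion level.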
  void train(LossMachine *loss_machine,
             SampleSet *sample_set,
             scalar_t *current_responses,
             scalar_t *loss_derivatives,
             int depth);

public:

  DecisionTree();
  ~DecisionTree();

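  // Size statistics of the tree (number of leaves and maximum depth).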
  int nb_leaves();
  int depth();

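  // Response of the tree for one sample: follows the threshold tests down
  // the tree and returns the resulting weight.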
  scalar_t response(SampleSet *sample_set, int n_sample);

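  // Entry point for training. The LossMachine and the current responses
  // presumably drive a gradient-boosting style fit, so that adding the
  // tree's responses to current_responses reduces the loss.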
  void train(LossMachine *loss_machine,
             SampleSet *sample_set,
             scalar_t *current_responses);

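  // Feature bookkeeping: mark which feature indexes the tree uses, and remap
  // its feature indexes after the feature set has been re-indexed.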
  void tag_used_features(bool *used);
  void re_index_features(int *new_indexes);

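  // Serialization of the tree structure and parameters.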
  void read(istream *is);
  void write(ostream *os);
};

#endif