-
-///////////////////////////////////////////////////////////////////////////
-// This program is free software: you can redistribute it and/or modify //
-// it under the terms of the version 3 of the GNU General Public License //
-// as published by the Free Software Foundation. //
-// //
-// This program is distributed in the hope that it will be useful, but //
-// WITHOUT ANY WARRANTY; without even the implied warranty of //
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU //
-// General Public License for more details. //
-// //
-// You should have received a copy of the GNU General Public License //
-// along with this program. If not, see <http://www.gnu.org/licenses/>. //
-// //
-// Written by Francois Fleuret //
-// (C) Idiap Research Institute //
-// //
-// Contact <francois.fleuret@idiap.ch> for comments & bug reports //
-///////////////////////////////////////////////////////////////////////////
+/*
+ * folded-ctf is an implementation of the folded hierarchy of
+ * classifiers for object detection, developed by Francois Fleuret
+ * and Donald Geman.
+ *
+ * Copyright (c) 2008 Idiap Research Institute, http://www.idiap.ch/
+ * Written by Francois Fleuret <francois.fleuret@idiap.ch>
+ *
+ * This file is part of folded-ctf.
+ *
+ * folded-ctf is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 3 as
+ * published by the Free Software Foundation.
+ *
+ * folded-ctf is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with folded-ctf. If not, see <http://www.gnu.org/licenses/>.
+ *
+ */
+
+/*
+
+ An implementation of the classifier with a decision tree. Each node
+  simply thresholds one of the components, and is chosen for maximum
+ loss reduction locally during training. The leaves are labelled with
+ the classifier response, which is chosen again for maximum loss
+ reduction.
+
+ */
#ifndef DECISION_TREE_H
#define DECISION_TREE_H
class DecisionTree : public Classifier {
+ static const int min_nb_samples_for_split = 5;
+
int _feature_index;
scalar_t _threshold;
scalar_t _weight;
DecisionTree *_subtree_lesser, *_subtree_greater;
- static const int min_nb_samples_for_split = 5;
-
void pick_best_split(SampleSet *sample_set,
scalar_t *loss_derivatives);