2 * folded-ctf is an implementation of the folded hierarchy of
3 * classifiers for object detection, developed by Francois Fleuret
6 * Copyright (c) 2008 Idiap Research Institute, http://www.idiap.ch/
7 * Written by Francois Fleuret <francois.fleuret@idiap.ch>
9 * This file is part of folded-ctf.
11 * folded-ctf is free software: you can redistribute it and/or modify
12 * it under the terms of the GNU General Public License version 3 as
13 * published by the Free Software Foundation.
15 * folded-ctf is distributed in the hope that it will be useful, but
16 * WITHOUT ANY WARRANTY; without even the implied warranty of
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18 * General Public License for more details.
20 * You should have received a copy of the GNU General Public License
21 * along with folded-ctf. If not, see <http://www.gnu.org/licenses/>.
#include <cstdlib>

#include "classifier_reader.h"
#include "fusion_sort.h"

#include "boosted_classifier.h"
31 BoostedClassifier::BoostedClassifier(int nb_weak_learners) {
32 _nb_weak_learners = nb_weak_learners;
36 BoostedClassifier::BoostedClassifier() {
37 _nb_weak_learners = 0;
41 BoostedClassifier::~BoostedClassifier() {
43 for(int w = 0; w < _nb_weak_learners; w++)
44 delete _weak_learners[w];
45 delete[] _weak_learners;
49 scalar_t BoostedClassifier::response(SampleSet *sample_set, int n_sample) {
51 for(int w = 0; w < _nb_weak_learners; w++) {
52 r += _weak_learners[w]->response(sample_set, n_sample);
58 void BoostedClassifier::train(LossMachine *loss_machine,
59 SampleSet *sample_set, scalar_t *train_responses) {
62 cerr << "Can not re-train a BoostedClassifier" << endl;
66 int nb_pos = 0, nb_neg = 0;
68 for(int s = 0; s < sample_set->nb_samples(); s++) {
69 if(sample_set->label(s) > 0) nb_pos++;
70 else if(sample_set->label(s) < 0) nb_neg++;
73 _weak_learners = new DecisionTree *[_nb_weak_learners];
75 (*global.log_stream) << "With " << nb_pos << " positive and " << nb_neg << " negative samples." << endl;
77 for(int w = 0; w < _nb_weak_learners; w++) {
79 _weak_learners[w] = new DecisionTree();
80 _weak_learners[w]->train(loss_machine, sample_set, train_responses);
82 for(int n = 0; n < sample_set->nb_samples(); n++)
83 train_responses[n] += _weak_learners[w]->response(sample_set, n);
85 (*global.log_stream) << "Weak learner " << w
86 << " depth " << _weak_learners[w]->depth()
87 << " nb_leaves " << _weak_learners[w]->nb_leaves()
88 << " train loss " << loss_machine->loss(sample_set, train_responses)
93 (*global.log_stream) << "Built a classifier with " << _nb_weak_learners << " weak_learners." << endl;
96 void BoostedClassifier::tag_used_features(bool *used) {
97 for(int w = 0; w < _nb_weak_learners; w++)
98 _weak_learners[w]->tag_used_features(used);
101 void BoostedClassifier::re_index_features(int *new_indexes) {
102 for(int w = 0; w < _nb_weak_learners; w++)
103 _weak_learners[w]->re_index_features(new_indexes);
106 void BoostedClassifier::read(istream *is) {
108 cerr << "Can not read over an existing BoostedClassifier" << endl;
112 read_var(is, &_nb_weak_learners);
113 _weak_learners = new DecisionTree *[_nb_weak_learners];
114 for(int w = 0; w < _nb_weak_learners; w++) {
115 _weak_learners[w] = new DecisionTree();
116 _weak_learners[w]->read(is);
117 (*global.log_stream) << "Read tree " << w << " of depth "
118 << _weak_learners[w]->depth() << " with "
119 << _weak_learners[w]->nb_leaves() << " leaves." << endl;
123 << "Read BoostedClassifier containing " << _nb_weak_learners << " weak learners." << endl;
126 void BoostedClassifier::write(ostream *os) {
128 id = CLASSIFIER_BOOSTED;
131 write_var(os, &_nb_weak_learners);
132 for(int w = 0; w < _nb_weak_learners; w++)
133 _weak_learners[w]->write(os);