From 713c683d77fc94a4257c4031b0c51ef4669a3d4a Mon Sep 17 00:00:00 2001 From: Francois Fleuret Date: Sun, 14 Dec 2008 22:46:20 +0100 Subject: [PATCH] automatic commit --- Makefile | 42 ++++ README.txt | 49 ++++ ann.cc | 320 +++++++++++++++++++++++++ doit.sh | 50 ++++ gpl-3.0.txt | 674 ++++++++++++++++++++++++++++++++++++++++++++++++++++ images.cc | 153 ++++++++++++ images.h | 75 ++++++ misc.cc | 37 +++ misc.h | 47 ++++ neural.cc | 412 ++++++++++++++++++++++++++++++++ neural.h | 118 +++++++++ 11 files changed, 1977 insertions(+) create mode 100644 Makefile create mode 100644 README.txt create mode 100644 ann.cc create mode 100755 doit.sh create mode 100644 gpl-3.0.txt create mode 100644 images.cc create mode 100644 images.h create mode 100644 misc.cc create mode 100644 misc.h create mode 100644 neural.cc create mode 100644 neural.h diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..48ecf1b --- /dev/null +++ b/Makefile @@ -0,0 +1,42 @@ + +# mlp-mnist is an implementation of a multi-layer neural network. +# +# Copyright (c) 2008 Idiap Research Institute, http://www.idiap.ch/ +# Written by Francois Fleuret +# +# This file is part of mlp-mnist. +# +# mlp-mnist is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 3 as +# published by the Free Software Foundation. +# +# mlp-mnist is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with mlp-mnist. If not, see . 
+ +ifeq ($(DEBUG),yes) + CXXFLAGS = -Wall -g +else + # Optimized compilation + CXXFLAGS = -Wall -ffast-math -fcaller-saves -finline-functions -funroll-all-loops -O3 +endif + +all: ann + +ann: ann.o misc.o images.o neural.o + $(CXX) $(CXXFLAGS) -o $@ $^ $(LDFLAGS) + +Makefile.depend: *.h *.cc Makefile + $(CC) -M *.cc > Makefile.depend + +clean: + \rm ann *.o Makefile.depend + +archive: + cd .. ; tar zcvf mlp-mnist.tgz mlp-mnist/{*.{cc,h,txt,sh},Makefile} + +-include Makefile.depend diff --git a/README.txt b/README.txt new file mode 100644 index 0000000..cfae48b --- /dev/null +++ b/README.txt @@ -0,0 +1,49 @@ + +You can run the whole script with "./doit.sh --download-mnist" or just +"./doit.sh" if you already have the MNIST database in the current +directory. + +You should get the following output (this takes a few hours on a +1.2GHz Pentium-M): + +---------------------------------------------------------------------- +Loading the data file ... done. +Database contains 60000 images of resolution 28x28 divided into 10 objects. +Creating a new network (layers of sizes 784 200 10). +Training the network with 20000 training and 20000 validation examples. 
+0 TRAINING 12235.8 (8.58%) TESTING 13030 (9.405%) +1 TRAINING 8839.31 (6.69%) TESTING 10132.9 (7.71%) +2 TRAINING 6502.38 (4.575%) TESTING 8268.75 (6.235%) +3 TRAINING 5656.71 (3.975%) TESTING 7637.6 (5.75%) +4 TRAINING 5456.68 (3.56%) TESTING 7683.5 (5.6%) [1] +5 TRAINING 4167.26 (2.64%) TESTING 6557.84 (4.82%) +6 TRAINING 4320.34 (2.7%) TESTING 6796.09 (4.89%) [2] +7 TRAINING 3725.38 (2.435%) TESTING 6307.99 (4.52%) +8 TRAINING 3946.58 (2.49%) TESTING 6614.6 (4.53%) [3] +9 TRAINING 3773.16 (2.24%) TESTING 6698.8 (4.67%) [4] +10 TRAINING 3485.74 (2.13%) TESTING 6539.64 (4.54%) +11 TRAINING 5903.21 (3.53%) TESTING 8881.58 (5.905%) [5] +12 TRAINING 3165.84 (1.89%) TESTING 6366.87 (4.385%) +13 TRAINING 3288.64 (2%) TESTING 6520.78 (4.5%) [6] +14 TRAINING 2849.94 (1.615%) TESTING 6201.43 (4.215%) +15 TRAINING 2693.19 (1.555%) TESTING 5991.35 (4.235%) +16 TRAINING 2827.86 (1.575%) TESTING 6181.83 (4.235%) [7] +17 TRAINING 2374.73 (1.355%) TESTING 5668.65 (3.77%) +18 TRAINING 2194.12 (1.255%) TESTING 5572.82 (3.705%) +19 TRAINING 2114.23 (1.155%) TESTING 5587.74 (3.71%) [8] +20 TRAINING 1909.78 (1.15%) TESTING 5377.6 (3.64%) +21 TRAINING 3064.62 (1.705%) TESTING 6642.78 (4.36%) [9] +22 TRAINING 1832.23 (1.04%) TESTING 5386.12 (3.575%) +23 TRAINING 1695.47 (0.95%) TESTING 5342.3 (3.61%) +24 TRAINING 1699.28 (0.935%) TESTING 5331.84 (3.46%) +25 TRAINING 1478.36 (0.835%) TESTING 5075.09 (3.335%) +26 TRAINING 1528.62 (0.865%) TESTING 5221.1 (3.41%) [10] +Saving network simple.mlp ... done. +Loading the data file ... done. +Database contains 10000 images of resolution 28x28 divided into 10 objects. +Loading network simple.mlp ... done (layers of sizes 784 200 10) +Error rate 2599.54 (3.42%) +---------------------------------------------------------------------- + +The computation produces a file simple.mlp containing the learnt +perceptron. 
diff --git a/ann.cc b/ann.cc new file mode 100644 index 0000000..c3e9e98 --- /dev/null +++ b/ann.cc @@ -0,0 +1,320 @@ +/* + * mlp-mnist is an implementation of a multi-layer neural network. + * + * Copyright (c) 2008 Idiap Research Institute, http://www.idiap.ch/ + * Written by Francois Fleuret + * + * This file is part of mlp-mnist. + * + * mlp-mnist is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 3 as + * published by the Free Software Foundation. + * + * mlp-mnist is distributed in the hope that it will be useful, but + * WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with mlp-mnist. If not, see . + * + */ + +// LeCun et al. 1998: + +// 2-layer NN, 300 hidden units, mean square error 4.70% +// 2-layer NN, 1000 hidden units 4.50% +// 3-layer NN, 300+100 hidden units 3.05% +// 3-layer NN, 500+150 hidden units 2.95% + +/********************************************************************* + + This program, trained on 20,000 (+ 20,000 for the stopping + criterion), tested on the 10,000 of the MNIST test set 100 hidden + neurons, basic network, 3.48% + + TRAINING + + ./ann --nb-training-examples 20000 --nb-validation-examples 20000 \ + --mlp-structure 784,200,10 \ + --data-files ${DATA_DIR}/train-images-idx3-ubyte ${DATA_DIR}/train-labels-idx1-ubyte \ + --save-mlp simple.mlp + + TEST + + ./ann --load-mlp simple.mlp \ + --data-files ${DATA_DIR}/t10k-images-idx3-ubyte ${DATA_DIR}/t10k-labels-idx1-ubyte \ + --nb-test-examples 10000 + +*********************************************************************/ + +#include +#include +#include +#include +#include +#include + +using namespace std; + +#include "images.h" +#include "neural.h" + +#define SMALL_BUFFER_SIZE 1024 + 
+////////////////////////////////////////////////////////////////////// +// Global Variables +////////////////////////////////////////////////////////////////////// + +int nb_experiment = 0; +int nb_training_examples = 0; +int nb_validation_examples = 0; +int nb_test_examples = 0; +bool save_data = false; + +char images_filename[SMALL_BUFFER_SIZE] = "\0"; +char labels_filename[SMALL_BUFFER_SIZE] = "\0"; +char opt_load_filename[SMALL_BUFFER_SIZE] = "\0"; +char opt_save_filename[SMALL_BUFFER_SIZE] = "\0"; +char opt_layer_sizes[SMALL_BUFFER_SIZE] = "\0"; + +char *next_word(char *buffer, char *r, int buffer_size) { + char *s; + s = buffer; + if(r != NULL) + { + if(*r == '"') { + r++; + while((*r != '"') && (*r != '\0') && + (s= argc) { + cerr << "Missing argument for " << argv[n_opt] << ".\n"; + cerr << "Expecting " << help << ".\n"; + exit(1); + } +} + +void print_help_and_exit(int e) { + cout << "ANN. Written by François Fleuret.\n"; + cout << "$Id: ann.cc,v 1.1 2005-12-13 17:19:11 fleuret Exp $\n"; + cout<< "\n"; + exit(e); +} + +int main(int argc, char **argv) { + + if(argc == 1) print_help_and_exit(1); + + nice(10); + + // Parsing the command line parameters /////////////////////////////// + + int i = 1; + + while(i < argc) { + + if(argc == 1 || strcmp(argv[i], "--help") == 0) print_help_and_exit(0); + + else if(strcmp(argv[i], "--data-files") == 0) { + check_opt(argc, argv, i, 2, " "); + strncpy(images_filename, argv[i+1], SMALL_BUFFER_SIZE); + strncpy(labels_filename, argv[i+2], SMALL_BUFFER_SIZE); + i += 3; + } + + else if(strcmp(argv[i], "--load-mlp") == 0) { + check_opt(argc, argv, i, 1, ""); + strncpy(opt_load_filename, argv[i+1], SMALL_BUFFER_SIZE); + i += 2; + } + + else if(strcmp(argv[i], "--mlp-structure") == 0) { + check_opt(argc, argv, i, 1, ",