[[ "${TORCH_NB_THREADS}" ]] || echo "You can set \$TORCH_NB_THREADS to the proper value (default 1)."
[[ "${TORCH_USE_GPU}" ]] || echo "You can set \$TORCH_USE_GPU to 'yes' or 'no' (default 'no')."
-[[ "${DYNCNN_DATA_DIR}" ]] || DYNCNN_DATA_DIR="./data/10p-mg"
+[[ "${DYNCNN_DATA_DIR}" ]] || DYNCNN_DATA_DIR="./data/10p-mg"
[[ "${DYNCNN_RUNDIR}" ]] || DYNCNN_RUNDIR="./results"
-######################################################################
-# Create the data-set if the directory does not exist
-
if [[ ! -d "${DYNCNN_DATA_DIR}" ]]; then
- cat <<EOF
-***************************************************************************
- Generate the data-set
-***************************************************************************
-EOF
-
+ # Creating the data-set
make -j -k
mkdir -p "${DYNCNN_DATA_DIR}"
- # 17 frames every 16 is two frames: t+0, t+16
+ # With --nb_frames 17 and --every_nth 16, each sequence keeps two frames: t+0 and t+16
./flatland 40000 \
--nb_shapes 10 \
- --multi_grasp --every_nth 16 --nb_frames 17 \
+ --random_grasp --every_nth 16 --nb_frames 17 \
--dir "${DYNCNN_DATA_DIR}"
fi
-######################################################################
-# Train the model (takes 15h on a GTX 1080 with cuda 8.0, cudnn 5.1,
+# Train the model (takes 30h on a GTX 1080 with cuda 8.0, cudnn 5.1,
# and recent torch)
-if [[ ! -f "${DYNCNN_RUNDIR}"/model_1000.t7 ]]; then
- cat <<EOF
-***************************************************************************
- Train the model (should take a while)
-***************************************************************************
-EOF
+if [[ ! -f "${DYNCNN_RUNDIR}"/scheme_02000.t7 ]]; then
./dyncnn.lua -rundir "${DYNCNN_RUNDIR}"
fi
-######################################################################
# Create the images of internal activations using the current.t7 in
# the rundir
-cat <<EOF
-***************************************************************************
- Save the internal activation images
-***************************************************************************
-EOF
-
-for n in 2 12; do
- ./dyncnn.lua -rundir "${DYNCNN_RUNDIR}" -noLog -exampleInternals "${n}"
-done
+./dyncnn.lua -rundir "${DYNCNN_RUNDIR}" -noLog -exampleInternals 3,7
######################################################################
# Plot the loss curves if gnuplot is here