

#include <cstdio>   // printf
#include <cstdlib>  // srand, rand
#include <ctime>    // time

#include "layer.all.inc.cpp"
#include "test.all.inc.cpp"
#include "train.digit.inc.cpp"
#include "train.image.inc.cpp"
#include "train.cx4.inc.cpp"
#include "benchmark.inc.cpp"
#include "benchmark.segment.inc.cpp"


bool runTests() {
  if (!AllTest::test()) return false;
  printf("success\n");
  return true;
}


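// Fully connected classifier for 28x28 digit images:
// 784 inputs -> 256 -> 64 -> 10 sigmoid units, one weight file per layer.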
bool trainDigits() {
  #define FILENAME "data/output/weights-digit.bin"

  printf("create neural network\n");
  Layer l(                    nullptr, Layout(28, 28) );
  (new LayerSimple<funcSigmoidExp>( l, Layout(256)   ))->filename = FILENAME "1";
  (new LayerSimple<funcSigmoidExp>( l, Layout(64)    ))->filename = FILENAME "2";
  (new LayerSimple<funcSigmoidExp>( l, Layout(10)    ))->filename = FILENAME "3";
  l.sumStat().print();
  
  #undef FILENAME

  printf("load training data\n");
  TrainerDigit t;
  if (!t.loadSymbolMap("data/symbols-data.bin")) return false; // 28x28

  printf("try load previously saved network\n"); l.load();
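  // configure() takes positional parameters; judging by the TrainerCx4 fields
  // set in trainCx4() below, these are presumably (net, ratio, threadsCount,
  // trainsPerBlock, ?, ?, qmin) -- this mapping is an assumption, not verified.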
  t.configure(l, 0.5, 8, 70000, 0, 0, 0.00001).run();
  
  return true;
}


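// Convolutional digit classifier: two shared-weight ReLU conv layers
// followed by two dense sigmoid layers.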
bool trainDigitsConv() {
  #define FILENAME "data/output/weights-digit-conv.bin"

  printf("create neural network\n");
  Layer l(nullptr, Layout(28, 28));
  Layer *ll[10] = {};
  ll[1] = new LayerConvShared<funcReLU>(l, Layout(12, 12, 6), Kernel(4, 2, 0)); ll[1]->filename = FILENAME "1";
  ll[2] = new LayerConvShared<funcReLU>(l, Layout(4, 4, 12), Kernel(4, 2, 0)); ll[2]->filename = FILENAME "2";
  ll[3] = new LayerSimple<funcSigmoidExp>(l, Layout(64)); ll[3]->filename = FILENAME "3";
  ll[4] = new LayerSimple<funcSigmoidExp>(l, Layout(10)); ll[4]->filename = FILENAME "4";
  l.sumStat().print();

  #undef FILENAME

  printf("load training data\n");
  TrainerDigit t;
  if (!t.loadSymbolMap("data/symbols-data.bin")) return false; // 28x28

  printf("try load previously saved network\n"); l.load();

  //ll[1]->skipTrain = true;
  //ll[2]->skipTrain = true;
  
  t.configure(l, 0.01, 8, 70000, 0, 0, 0.00001).run();
  //t.configure(l, 0.5, 8, 70000, 0, 0, 0.00001).run();
  
  return true;
}


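// Convolutional autoencoder for 128x128 RGB images: two conv layers are
// mirrored by deconv layers that reuse the same weight buffers (tied weights).
// With ll[1]/ll[10] frozen below, only the inner pair ll[2]/ll[9] is trained.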
bool trainImage() {
  #define FILENAME "data/output/weights-image.bin"

  printf("create neural network\n");
  Layer l(nullptr, Layout(128, 128, 3));
  Layer *ll[20] = {};
  ll[ 1] = new LayerConvShared<funcReLU>(l, Layout(63, 63, 24), Kernel(4, 2, 0)); ll[1]->filename = FILENAME "1";
  ll[ 2] = new LayerConvShared<funcReLU>(l, Layout(29, 29, 48), Kernel(5, 2, 0)); ll[2]->filename = FILENAME "2";
  //ll[ 3] = new LayerConvShared<funcReLU>(l, Layout(14, 14, 24), Kernel(4, 2, 0)); ll[3]->filename = FILENAME "3";
  //ll[ 4] = new LayerConvShared<funcReLU>(l, Layout( 6,  6, 48), Kernel(4, 2, 0)); ll[4]->filename = FILENAME "4";
  //ll[ 5] = new LayerConvShared<funcReLU>(l, Layout( 2,  2, 96), Kernel(4, 2, 0)); ll[5]->filename = FILENAME "5";
  //ll[ 6] = new LayerDeconvShared<funcReLU>(l, Layout(  6,  6, 48), Kernel(4, 2, 0), ll[5]->weights);
  //ll[ 7] = new LayerDeconvShared<funcReLU>(l, Layout( 14, 14, 24), Kernel(4, 2, 0), ll[4]->weights);
  //ll[ 8] = new LayerDeconvShared<funcReLU>(l, Layout( 30, 30, 12), Kernel(4, 2, 0), ll[3]->weights);
  ll[ 9] = new LayerDeconvShared<funcReLU>(l, Layout( 63, 63, 24), Kernel(5, 2, 0), ll[2]->weights);
  ll[10] = new LayerDeconvShared<funcReLU>(l, Layout(128, 128, 3), Kernel(4, 2, 0), ll[1]->weights);

  l.sumStat().print();

  printf("try load previously saved network\n"); l.load();

  ll[1]->skipTrain = true;
  ll[10]->skipTrain = true;

  TrainerImage t;
  t.pad = 16;
  t.datafile = "data/img128-data.bin";
  t.outfile = FILENAME ".test";

  t.configure(l, 0.00001, 8, 1000, 0, 0, 0.00001).run();
  
  #undef FILENAME
  return true;
}


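// Encoder/decoder on 512x512 RGB images, grown layer by layer: every conv
// layer gets a weight-tied deconv mirror, and only the deepest pair is left
// trainable, together with a SegmentCx4 that shares the deepest layer's weights.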
bool trainCx4() {
  #define FILENAME "data/output/weights-cx4.bin"

  printf("create neural network\n");
  Layer l(nullptr, Layout(512, 512, 3).expandXY(2));
  Layer *fl[20] = { &l };
  int cnt = 1;
  fl[cnt] = new LayerConvShared<funcReLU>(l, Layout(257, 257, 24).expandXY(3), Kernel(4, 2, -2)); fl[cnt]->filename = FILENAME "1"; ++cnt;
  fl[cnt] = new LayerConvShared<funcReLU>(l, Layout(130, 130, 48), Kernel(4, 2, -2)); fl[cnt]->filename = FILENAME "2"; ++cnt;
  //fl[cnt] = new LayerConvShared<funcReLU>(l, Layout( 66,  66, 96), Kernel(4, 2, -2)); fl[cnt]->filename = FILENAME "3"; ++cnt;
  //fl[cnt] = new LayerConvShared<funcReLU>(l, Layout( 6,  6, 48), Kernel(4, 2,  0)); fl[cnt]->filename = FILENAME "4"; ++cnt;
  //fl[cnt] = new LayerConvShared<funcReLU>(l, Layout( 2,  2, 96), Kernel(4, 2,  0)); fl[cnt]->filename = FILENAME "5"; ++cnt;
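  // Mirror every conv layer with a weight-tied deconv layer, deepest first;
  // all pairs except the deepest one are frozen, so only the most recently
  // added layer (and its mirror) is actually trained.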
  for(int i = cnt-1; i > 0; --i) {
    LayerConvShared<funcReLU> *fwd = dynamic_cast<LayerConvShared<funcReLU>*>(fl[i]);
    Layer *bl = new LayerDeconvShared<funcReLU>(l, fl[i-1]->layout, fwd->kernel, fwd->weights);
    if (i < cnt-1) fl[i]->skipTrain = bl->skipTrain = true;
  }

  l.sumStat().print();

  printf("try load previously saved network\n"); l.load();

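  // SegmentCx4 shares the deepest conv layer's weight buffer and weight file,
  // so training the segment updates that layer (and its deconv mirror) in place.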
  SegmentCx4 s(fl[cnt-2]->layout.getD(), fl[cnt-1]->layout.getD(), fl[cnt-1]->weights);
  s.filename = fl[cnt-1]->filename;

  TrainerCx4 t;
  t.layerFull        = &l;
  t.layerPre         = cnt > 2 ? fl[cnt-2] : nullptr;
  t.segment          = &s;
  t.ratio            = 0.000001;
  t.threadsCount     = 8;
  t.measuresPerBlock = 1000;
  t.trainsPerBlock   = 100000;
  t.loadImagesCount  = 100;
  t.blocksPerLoading = 1;
  t.qmin             = 0.00001;
  t.infile           = "data/img512-data.bin";
  t.outfile          = FILENAME ".test";
  
  t.run();
  
  #undef FILENAME
  return true;
}


int main() {
  srand((unsigned)time(nullptr));

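  // exactly one of the entry points below should be active at a time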
  //while(1) BenchmarkSegment::run(8);
  //while(1) Benchmark().run(8);
  //return !runTests();
  //return !trainDigits();
  //return !trainDigitsConv();
  //return !trainImage();
  return !trainCx4();
}