// simple/neural/nnlayer.inc.cpp

#ifndef NNLAYER_INC_CPP
#define NNLAYER_INC_CPP


#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cassert>

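// A minimal feed-forward neural network in one header: Layer stores the
// activations (a), error deltas (da) and a doubly linked chain of layers;
// LayerSimple below adds a fully connected sigmoid layer trained by
// per-sample gradient descent.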
class Layer {
public:
  Layer *prev, *next;       // doubly linked chain of layers
  size_t memsize;           // bytes allocated by this layer
  int size, wsize, links;   // neuron count, weight count, link count
  double *a, *da, *w;       // activations, error deltas, weights

  Layer(Layer *prev, int size):
    prev(), next(), memsize(), size(size), wsize(), links(), w()
  {
    assert(size > 0);
    a = new double[size*2];            // single block: activations, then deltas
    da = a + size;
    memset(a, 0, sizeof(*a)*size*2);
    if (prev) (this->prev = &prev->back())->next = this;  // append to the end of the chain
    memsize += size*2*sizeof(double);
  }

  virtual ~Layer() {
    if (next) delete next;            // deleting the front layer frees the whole chain
    if (prev) prev->next = nullptr;   // detach from the previous layer
    delete[] a;                       // da points into the same allocation
    if (w) delete[] w;
  }

  virtual Layer& pass()
    { return next ? next->pass() : *this; }
  virtual Layer& backpass(double trainRatio)
    { return prev ? prev->backpass(trainRatio) : *this; }

  inline Layer& front()
    { Layer *l = this; while(l->prev) l = l->prev; return *l; }
  inline Layer& back()
    { Layer *l = this; while(l->next) l = l->next; return *l; }

  inline size_t totalMemSize() const
    { size_t s = 0; for(const Layer *l = this; l; l = l->next) s += l->memsize; return s; }
  inline int totalSize() const
    { int c = 0; for(const Layer *l = this; l; l = l->next) c += l->size; return c; }
  inline int totalLinks() const
    { int c = 0; for(const Layer *l = this; l; l = l->next) c += l->links; return c; }

  bool toStream(FILE *f)
    { return (!w || fwrite(w, sizeof(double)*wsize, 1, f)) && (!next || next->toStream(f)); }
  bool fromStream(FILE *f)
    { return (!w || fread (w, sizeof(double)*wsize, 1, f)) && (!next || next->fromStream(f)); }

  bool save(const char *filename) {
    assert(!prev);
    FILE *f = fopen(filename, "wb");
    if (!f) return printf("cannot open file '%s' for write\n", filename), false;
    if (!toStream(f)) return printf("cannot write to file '%s'\n", filename), fclose(f), false;
    fclose(f);
    return true;
  }

  bool load(const char *filename) {
    assert(!prev);
    FILE *f = fopen(filename, "rb");
    if (!f) return printf("cannot open file '%s' for read\n", filename), false;
    if (!fromStream(f)) return printf("cannot read from file '%s'\n", filename), fclose(f), false;
    fclose(f);
    return true;
  }

  double trainPass(double ratio, double *x, double *y, double qmin) {
    assert(!prev);
    memcpy(a, x, sizeof(*a)*size);  // load the input into the first layer
    Layer &b = pass();              // forward pass; b is the output layer

    double qmax = 0;
    for(double *pa = b.a, *pda = b.da, *e = pa + b.size; pa < e; ++pa, ++pda, ++y) {
      assert(*pa == *pa);           // catch NaN in the output activations
      double d = *y - *pa;
      *pda = d;
      //qmax += d*d;
      double q = fabs(d);
      if (qmax < q) qmax = q;       // track the maximum absolute error (L-infinity norm)
    }
    //qmax = sqrt(qmax/b.size);
    if (qmax > qmin)
      b.backpass(ratio);            // backpropagate only when the error is worth training on

    //if (qmax < 1e-6) {
    //  printf("strange:\n");
    //  y -= b.size;
    //  for(double *pa = b.a, *e = pa + b.size; pa < e; ++pa, ++y)
    //    printf("%f - %f = %f\n", *y, *pa, *y - *pa);
    //}

    return qmax;
  }
};
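
// Persistence note (editor's sketch, not from the original file): toStream()/
// fromStream() write and read only the raw weight arrays, with no sizes or
// topology markers, so a network must be rebuilt with identical layer sizes
// before load() can restore it. For example (hypothetical names):
//
//   net.save("weights.bin");
//   // ... later, after constructing the same layer chain ...
//   rebuilt.load("weights.bin");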


class LayerSimple: public Layer {
public:
  LayerSimple(Layer &prev, int size):
    Layer(&prev, size)
  {
    links = wsize = size * this->prev->size;   // fully connected: one weight per link
    w = new double[wsize];
    double k = 1.0/this->prev->size;           // scale initial weights by fan-in
    for(double *iw = w, *e = iw + wsize; iw < e; ++iw)
      *iw = (rand()/(double)RAND_MAX*2 - 1)*k; // uniform in [-k, k]
    memsize += wsize*sizeof(double);
  }

  Layer& pass() override {
    double *pa = prev->a, *ee = pa + prev->size;
    double *iw = w;
    for(double *ia = a, *e = ia + size; ia < e; ++ia) {
      double s = 0;
      for(double *ipa = pa; ipa < ee; ++ipa, ++iw)
        s += *ipa * *iw;             // weighted sum over the previous layer
      *ia = 1/(1 + exp(-s)); // sigmoid
      //*ia = s > 0 ? s : 0; // ReLU
    }
    return next ? next->pass() : *this;
  }
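
  // Note for backpass(): the sigmoid derivative satisfies
  //   d/ds sigma(s) = sigma(s)*(1 - sigma(s)) = a*(1 - a),
  // so each unit's local gradient is ds = a*(1-a)*da; weights move by
  // trainRatio*ds*prev_a and the previous layer accumulates da += ds*w.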
  Layer& backpass(double trainRatio) override {
    double *pa = prev->a, *pda = prev->da, *ee = pa + prev->size;
    double *iw = w;
    if (prev->prev) {
      // a trainable layer lies below: propagate deltas back and update weights
      memset(pda, 0, sizeof(*prev->da) * prev->size);
      for(double *ia = a, *ida = da, *e = ia + size; ia < e; ++ia, ++ida) {
        double a = *ia;
        double ds = a * (1-a) * *ida; // sigmoid derivative * ida
        //if (!*ia) continue; // ReLU derivative is zero
        //double ds = *ida;
        double dst = ds*trainRatio;
        for(double *ipa = pa, *ipda = pda; ipa < ee; ++ipa, ++ipda, ++iw) {
          *ipda += ds * *iw;  // accumulate the previous layer's delta
          *iw += dst * *ipa;  // weight update
        }
      }
    } else {
      // the previous layer is the input: nothing to propagate, only update weights
      for(double *ia = a, *ida = da, *e = ia + size; ia < e; ++ia, ++ida) {
        double a = *ia;
        double dst = a * (1-a) * *ida * trainRatio; // sigmoid derivative * ida * trainRatio
        //if (!*ia) continue; // ReLU derivative is zero
        //double dst = *ida * trainRatio;
        for(double *ipa = pa; ipa < ee; ++ipa, ++iw)
          *iw += dst * *ipa;
      }
    }
    return prev->backpass(trainRatio);
  }
};
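
// Usage sketch (editor's illustration, not part of the original file): wires
// up a small network and runs a few training iterations on XOR. The layer
// sizes, the 0.5 train ratio, and the NNLAYER_EXAMPLE guard are assumed
// values. Note the layers have no bias terms, so a tight XOR fit is not
// guaranteed; this mainly demonstrates the API.
#ifdef NNLAYER_EXAMPLE
int main() {
  srand(12345);
  Layer net(nullptr, 2);    // input layer; owns the layers appended below
  new LayerSimple(net, 4);  // hidden layer, appends itself to the chain
  new LayerSimple(net, 1);  // output layer, appends after the hidden layer

  double X[4][2] = {{0,0},{0,1},{1,0},{1,1}};
  double Y[4][1] = {{0},{1},{1},{0}};

  double q = 0;
  for(int i = 0; i < 100000; ++i)
    q = net.trainPass(0.5, X[i%4], Y[i%4], 0.001);
  printf("last residual error: %g\n", q);

  for(int s = 0; s < 4; ++s) {
    memcpy(net.a, X[s], sizeof(*net.a)*2);
    printf("%g %g -> %g\n", X[s][0], X[s][1], net.pass().a[0]);
  }
  return 0;  // net's destructor frees the appended layers
}
#endif // NNLAYER_EXAMPLE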
#endif