#ifndef NNLAYER_LNK_INC_CPP
#define NNLAYER_LNK_INC_CPP


#include <cassert>
#include <cmath>
#include <cstdlib>
#include <cstring>

#include <algorithm>

#include "nnlayer.inc.cpp"


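// LayerLink: a sparsely connected layer.  Each of its `size` neurons reads
// `lsize` inputs from the previous layer through per-weight pointers: wa[i]
// targets the source activation of weight w[i], wda[i] the matching delta
// slot.  The base class only allocates and zeroes these tables; subclasses
// such as LayerLinkConvolution below are expected to wire them up.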
class LayerLink: public Layer {
public:
  int lsize;          // number of input links per neuron
  double **wa, **wda; // per-weight pointers into prev->a / prev->da (wsize entries each)

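  // Allocates wsize = size*lsize weights plus the two pointer tables (wa and
  // wda share one allocation) and initializes the weights uniformly in
  // [-1, 1]; the link targets stay null until a subclass fills them in.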
  LayerLink(Layer &prev, int size, int lsize):
    Layer(&prev, size), lsize(lsize)
  {
    assert(lsize > 0);
    links = wsize = size*lsize;
    w = new double[wsize];
    wa = new double*[wsize*2];
    wda = wa + wsize;
    memset(wa, 0, sizeof(*wa)*wsize*2);
    for(double *iw = w, *e = iw + wsize; iw < e; ++iw)
      *iw = rand()/(double)RAND_MAX*2 - 1;
    memsize += wsize*sizeof(double) + wsize*2*sizeof(double*);
  }

  ~LayerLink()
    { delete[] wa; }

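  // Checks that every link pointer actually targets the previous layer's
  // activation array (a) and delta array (da).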
  bool selfCheck() const {
    for(int i = 0; i < wsize; ++i)
      if ( !wa[i] ||  wa[i] < prev->a  ||  wa[i] >= prev->a  + prev->size
       || !wda[i] || wda[i] < prev->da || wda[i] >= prev->da + prev->size )
        return false;
    return true;
  }

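  // Forward pass: each neuron accumulates the weighted sum of its linked
  // inputs and applies the logistic sigmoid, then the call recurses into the
  // next layer (if any).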
  Layer& pass() override {
    double *ia = a;
    double *iw = w;
    double **iwa = wa;
    for(double *e = ia + size; ia < e; ++ia) {
      double s = 0;
      for(double *e = iw + lsize; iw < e; ++iw, ++iwa) {
        assert(*iwa);
        s += *iw * **iwa;
      }
      *ia = 1/(1 + exp(-s)); // sigmoid
    }
    return next ? next->pass() : *this;
  }

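  // Backward pass: scales the incoming delta by the sigmoid derivative,
  // optionally (Deep) accumulates deltas into the previous layer through the
  // wda pointers, and updates the weights in place with trainRatio acting as
  // the learning rate.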
  template<bool Deep>
  Layer& backpassT(double trainRatio) {
    double *ia = a;
    double *ida = da;
    double *iw = w;
    double **iwa = wa;
    double **iwda = wda;
    if (Deep) memset(prev->da, 0, sizeof(*prev->da)*prev->size);
    for(double *e = ia + size; ia < e; ++ia, ++ida) {
      double a = *ia;
      double ds = a * (1-a) * *ida; // sigmoid derivative times the incoming delta
      double dst = ds*trainRatio;
      for(double *e = iw + lsize; iw < e; ++iw, ++iwa, ++iwda) {
        assert(*iwa && *iwda);
        if (Deep) **iwda += ds * *iw;
        *iw += dst * **iwa;
      }
    }
    return prev->backpass(trainRatio);
  }

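  // Propagate deltas only when the previous layer has a predecessor of its
  // own; the input layer's deltas would never be consumed.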
  Layer& backpass(double trainRatio) override
    { return prev->prev ? backpassT<true>(trainRatio) : backpassT<false>(trainRatio); }
};



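// LayerLinkConvolution: wires a LayerLink as a convolution-like layer.  The
// previous layer is treated as a psx*psy*psz volume and this layer as
// sx*sy*sz.  For every output cell (x,y) the constructor picks the lsize
// input positions closest to the projected center (cx,cy) and connects each
// output channel z to all psz input channels at those positions, giving each
// neuron lsize*psz links.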
class LayerLinkConvolution: public LayerLink {
public:
  LayerLinkConvolution(Layer &prev, int psx, int psy, int psz, int sx, int sy, int sz, int lsize):
    LayerLink(prev, sx*sy*sz, lsize*psz)
  {
    assert(psx > 0 && psy > 0 && psz > 0);
    assert(sx > 0 && sy > 0 && sz > 0);
    assert(psx*psy*psz == this->prev->size);
    assert(lsize > 0 && lsize <= psx*psy);

    int hs = (int)sqrt(lsize*1.5) + 2;
    int s = hs*2 + 1;

    struct Point {
      int x, y, r;
      inline bool operator<(const Point &b) const
        { return r < b.r; }
    } *points = new Point[s*s], *p = points;

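    // Rank every offset of the (2*hs+1) x (2*hs+1) window by its squared
    // distance from the center; the small rnd table perturbs equal radii so
    // that ties are not always resolved in scan order.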
    int r = 0;
    static const int rnd[] = { 9, 12, 4, 6, 0, 15, 13, 8, 2, 3, 10, 1, 5, 11, 14, 7 };
    for(int y = -hs; y <= hs; ++y)
      for(int x = -hs; x <= hs; ++x, ++r, ++p)
        { p->x = x, p->y = y, p->r = (x*x + y*y)*16 + rnd[r%16]; }
    std::sort(points, p);

    int *order = new int[lsize];

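    // For each output cell, project its center into input coordinates and
    // collect the lsize nearest in-bounds input positions as its receptive
    // field.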
    for(int y = 0; y < sy; ++y) {
      for(int x = 0; x < sx; ++x) {
        int cx = (int)((x + 0.5)/(sx + 1)*(psx + 1));
        int cy = (int)((y + 0.5)/(sy + 1)*(psy + 1));

        p = points;
        for(int l = 0; l < lsize; ++l) {
          int px, py;
          do { px = cx + p->x; py = cy + p->y; ++p; }
            while(px < 0 || py < 0 || px >= psx || py >= psy);
          order[l] = py*psx + px;
        }
        std::sort(order, order + lsize);

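        // Wire every output channel z at (x,y) to all psz input channels of
        // the selected positions; the index layout keeps each neuron's
        // lsize*psz weights contiguous, matching pass()/backpassT().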
        for(int z = 0; z < sz; ++z) {
          for(int l = 0; l < lsize; ++l) {
            for(int pz = 0; pz < psz; ++pz) {
              int i = (((y*sx + x)*sz + z)*lsize + l)*psz + pz;
              int pi = order[l]*psz + pz;
              assert(i >= 0 && i < wsize);
              assert(pi >= 0 && pi < this->prev->size);
              wa[i] = &this->prev->a[pi];
              wda[i] = &this->prev->da[pi];
            }
          }
        }
      }
    }

    delete[] points;
    delete[] order;
    assert(selfCheck());
  }
};
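
// A minimal usage sketch, not part of this file.  Assumptions that are not
// verified here: the base Layer from nnlayer.inc.cpp can serve as a plain
// input layer via Layer(nullptr, size), its constructor links prev->next,
// and deltas are seeded on the last layer before calling backpass().  Layer
// sizes are purely illustrative.
//
//   Layer input(nullptr, 28*28);                                // 28x28x1 input
//   LayerLinkConvolution conv(input, 28, 28, 1, 14, 14, 4, 9);  // local fields
//   LayerLinkConvolution out(conv, 14, 14, 4, 1, 1, 10, 25);    // 10 outputs
//
//   input.pass();        // forward through the whole chain
//   // fill out.da with the loss gradient, then:
//   out.backpass(0.1);   // backprop with learning rate 0.1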


#endif