From 036a8f096baf6e7100e4f86b6a32b96e8827089a Mon Sep 17 00:00:00 2001
From: Ivan Mahonin
Date: Mar 16 2023 11:49:53 +0000
Subject: neural: fix iterators

---

diff --git a/projects/neural/layer.conv.inc.cpp b/projects/neural/layer.conv.inc.cpp
index c0b6e82..02ffb6c 100644
--- a/projects/neural/layer.conv.inc.cpp
+++ b/projects/neural/layer.conv.inc.cpp
@@ -230,7 +230,7 @@ public:
       iterateConvolutionPoint(mtLayouts[barrier.tid], prev->layout, layout, kernel, kx, ky, neurons, prev->neurons, weights);
       barrier.wait();
     }
-    iterateNeurons(prev->mtLayouts[barrier.tid], prev->neurons);
+    iterateNeurons(mtPrevLayouts[barrier.tid], prev->neurons);
   }


@@ -281,7 +281,7 @@ public:
     int k_sx = kernel.sx, k_sy = kernel.sy;
     for(int kx = 0; kx < k_sx; ++kx)
     for(int ky = 0; ky < k_sy; ++ky) {
-      iterateConvolutionPoint(prev->mtLayouts[barrier.tid], layout, prev->layout, kernel, kx, ky, prev->neurons, neurons, weights);
+      iterateConvolutionPoint(mtPrevLayouts[barrier.tid], layout, prev->layout, kernel, kx, ky, prev->neurons, neurons, weights);
       barrier.wait();
     }
     iterateNeurons(mtLayouts[barrier.tid], neurons);
@@ -293,7 +293,7 @@ public:
       static inline void init(Neuron &n, AccumType &a) { a.v = n.v; }
       static inline void iter(Neuron &n, Weight &w, AccumType &a) { w.w += n.d * a.v; }
     };
-    iterateConvolution(prev->mtLayouts[barrier.tid], prev->layout, layout, kernel, neurons, prev->neurons, weights);
+    iterateConvolution(mtPrevLayouts[barrier.tid], prev->layout, layout, kernel, neurons, prev->neurons, weights);
   }
@@ -303,7 +303,7 @@ public:
       static inline void iter(Neuron &n, Weight &w, AccumType &a) { a.v += n.d * w.w; }
       static inline void done(Neuron &n, AccumType &a) { n.d *= a.v; }
     };
-    iterateConvolution(prev->mtLayouts[barrier.tid], prev->layout, layout, kernel, neurons, prev->neurons, weights);
+    iterateConvolution(mtPrevLayouts[barrier.tid], prev->layout, layout, kernel, neurons, prev->neurons, weights);
   }
diff --git a/projects/neural/layer.inc.cpp b/projects/neural/layer.inc.cpp
index ad7c516..0e7b891 100644
--- a/projects/neural/layer.inc.cpp
+++ b/projects/neural/layer.inc.cpp
@@ -10,6 +10,7 @@

 #include
 #include
+#include

 #include
@@ -118,6 +119,7 @@ public:
   Stat stat;

   Layout::List mtLayouts;
+  Layout::List mtPrevLayouts;

   Layer(Layer *prev, const Layout &layout, int weightsCount = 0, Weight *weights = nullptr):
@@ -209,14 +211,22 @@ public:
   }


-  virtual void split(int threadsCount)
-    { layout.split(mtLayouts, threadsCount); }
+  virtual void split(int threadsCount) {
+    layout.split(mtLayouts, threadsCount);
+    if (prev) prev->layout.split(mtPrevLayouts, threadsCount);
+  }

   virtual void pass(Barrier &barrier) { }
   virtual void backpassWeights(Barrier &barrier) { }
   virtual void backpassDeltas(Barrier &barrier) { }
   virtual void testPass() { }
   virtual void testBackpass() { }
+
+
+  virtual void clGetThreadsData(std::vector &data) { }
+  virtual void clGetPassProgram(std::string &text) { }
+  virtual void clGetBackpassWeightsProgram(std::string &text) { }
+  virtual void clGetBackpassDeltasProgram(std::string &text) { }
 };
diff --git a/projects/neural/train.inc.cpp b/projects/neural/train.inc.cpp
index 8806713..a99b9f3 100644
--- a/projects/neural/train.inc.cpp
+++ b/projects/neural/train.inc.cpp
@@ -1,5 +1,5 @@
-#ifndef NNTRAIN_INC_CPP
-#define NNTRAIN_INC_CPP
+#ifndef TRAIN_INC_CPP
+#define TRAIN_INC_CPP


 #include
diff --git a/projects/neural/trainer.cpp b/projects/neural/trainer.cpp
index 51149f5..d8f8424 100644
--- a/projects/neural/trainer.cpp
+++ b/projects/neural/trainer.cpp
@@ -16,7 +16,7 @@ bool runTests() {

 int main() {
   srand(time(NULL));
-  //return !runTests();
+  return !runTests();

 #define FILENAME "data/output/weights.bin"

   // 28x28
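
For context, a minimal standalone sketch of the layout-splitting pattern this patch settles on. It uses simplified, hypothetical types (the real Layout, Layer, Barrier and iterate* helpers live in layer.inc.cpp / layer.conv.inc.cpp); the point is only that each layer now keeps a per-thread split of its own layout and of the previous layer's layout, so the backward pass walks prev->neurons through its own mtPrevLayouts instead of prev->mtLayouts.

// Sketch only: not the project's real classes, just the split() pattern from the diff.
#include <vector>

struct Layout {
  int begin = 0, end = 0;                   // simplified 1-D range of neurons
  using List = std::vector<Layout>;

  // Split this layout into per-thread sub-ranges.
  void split(List &out, int threadsCount) const {
    out.assign(threadsCount, Layout{});
    int size = end - begin;
    for (int t = 0; t < threadsCount; ++t) {
      out[t].begin = begin + size * t / threadsCount;
      out[t].end   = begin + size * (t + 1) / threadsCount;
    }
  }
};

struct Layer {
  Layer *prev = nullptr;
  Layout layout;
  Layout::List mtLayouts;      // per-thread views of this layer's layout
  Layout::List mtPrevLayouts;  // per-thread views of the previous layer's layout

  virtual void split(int threadsCount) {
    layout.split(mtLayouts, threadsCount);
    // The fixed iterators index mtPrevLayouts[barrier.tid] when walking
    // prev->neurons, so the previous layout is split here with the same
    // thread count rather than relying on prev->mtLayouts.
    if (prev) prev->layout.split(mtPrevLayouts, threadsCount);
  }
  virtual ~Layer() = default;
};

With this arrangement a worker thread tid always pairs mtLayouts[tid] with mtPrevLayouts[tid] inside pass/backpass, which is exactly what the layer.conv.inc.cpp hunks switch the convolution iterators to.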