-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathFCLayer.cpp
More file actions
103 lines (83 loc) · 2.17 KB
/
FCLayer.cpp
File metadata and controls
103 lines (83 loc) · 2.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
//
// Created by Filip Lux on 24.11.16.
//
#include <iostream>
#include <vector>
#include "FCLayer.h"
// fullConnected layer
// Fully connected layer stacked on top of `lower`.
// Wires its input to the lower layer's output buffer, announces its own
// width to the lower layer, and randomizes all trainable parameters.
FCLayer::FCLayer(int &inputs, int &neurons, Layer* lower) {
    n    = neurons;
    in   = inputs;
    down = lower;

    input    = down->out;   // read activations straight from the layer below
    down->ou = n;           // tell the layer below how many neurons sit above it

    ddot = new double[n];       // per-neuron error signal (delta)
    out  = new double[n];       // per-neuron activation
    w    = new double[in * n];  // weight matrix, row-major: n rows of in weights
    bias = new double[n];

    // random initial weights and biases
    for (int k = 0; k < in * n; ++k)
        w[k] = fRand(INIT_MIN, INIT_MAX);
    for (int k = 0; k < n; ++k)
        bias[k] = fRand(INIT_MIN, INIT_MAX);
}
// Standalone fully connected layer (no lower layer attached);
// the input pointer must be supplied later via update_input().
FCLayer::FCLayer(int &inputs, int &neurons) {
    n  = neurons;
    in = inputs;

    ddot = new double[n];       // per-neuron error signal (delta)
    out  = new double[n];       // per-neuron activation
    w    = new double[in * n];  // weight matrix, row-major: n rows of in weights
    bias = new double[n];

    // random initial weights and biases
    for (int k = 0; k < in * n; ++k)
        w[k] = fRand(INIT_MIN, INIT_MAX);
    for (int k = 0; k < n; ++k)
        bias[k] = fRand(INIT_MIN, INIT_MAX);
}
// Releases the parameter and activation buffers.
// Fixed: these members are allocated with new[] in the constructors, so
// they must be released with delete[] — plain `delete` on an array-new
// pointer is undefined behaviour.
FCLayer::~FCLayer() {
    delete[] bias;
    delete[] ddot;
    delete[] out;
    delete[] w;
}
void FCLayer::forward_layer() { //step forward with activation function
for (int i = 0; i < n; i++) {
out[i] = bias[i];
for (int j = 0; j < in; j++) {
out[i] += w[i*in+j] * input[j];
}
out[i] = sigma(out[i]);
}
}
// Propagates the error signal (delta) one layer down:
// down_ddot[i] = sum_j ddot[j] * w[i + j*ou].
// NOTE(review): the outer loop runs over n (this layer's neuron count)
// while writing into down_ddot — confirm the buffer below really holds
// n entries, otherwise this walks out of bounds.
// NOTE(review): the weights are indexed w[i + j*ou] here but w[i*in + j]
// in forward_layer(); verify the transposed indexing (and the use of ou
// rather than in as the stride) is intentional.
void FCLayer::backProp_layer() {
    for (int i = 0; i < n; i++) {
        down_ddot[i] = 0;  // accumulate contributions from every upper delta
        for (int j = 0; j < ou; j++) {
            down_ddot[i] += ddot[j] * w[i+j*ou];
        }
    }
}
// Output-layer delta: ddot[i] = (out[i] - target[i]) * sigmoid'(·),
// where sigmoid'(·) = out[i] * (1 - out[i]).
void FCLayer::computeError(double* result) {
    for (int k = 0; k < n; ++k) {
        const double err = out[k] - result[k];
        ddot[k] = err * out[k] * (1 - out[k]);
    }
}
void FCLayer::print() {
std::cout << "layer weights:" << std::endl;
for (int i = 0; i < n*n+1; i++) {
std::cout << "w" << i << ": " << w[i] << ", ";
}
std::cout << std::endl;
}
void FCLayer::learn() {
for (int i = 0; i < n; i++) {
for (int j = 0; j<in; j++) {
w[i*in+j] -= ddot[i] * input[j] * LR;
}
}
}
// Points the layer at a new input buffer; the layer does not take
// ownership and never frees it.
// NOTE(review): the buffer must stay alive across forward passes and is
// expected to hold at least `in` readable doubles — not checked here.
void FCLayer::update_input(double* in) {
    input = in;
};