// main.cpp — neural-network construction smoke tests (test2, test3, test_crazy_network_1).
// (Removed: GitHub page chrome and the scraped 1-161 line-number gutter, which were
// extraction artifacts, not part of the source file.)
#include <stdlib.h>
#include <iostream>
#include "allheader.h"
#include "network.h"
/// Allocates a heap array of nSize elements and fills every slot with val.
///
/// @tparam T     element type; must be default-constructible (new T[n])
///               and copy-assignable. Using the default argument requires
///               T to be constructible from 0.
/// @param nSize  element count. A negative count now returns nullptr
///               instead of reaching new[] with an invalid size
///               (which is undefined / throws std::bad_array_new_length).
///               nSize == 0 still yields a valid (empty) allocation,
///               matching the original behavior.
/// @param val    value copied into every element (defaults to 0).
/// @return       pointer OWNED BY THE CALLER — release with delete[].
template <typename T>
T* newArray(int nSize, T val = 0) {
    if (nSize < 0)
        return nullptr; // guard: negative array length is never valid
    T* arr = new T[nSize];
// fill is embarrassingly parallel; pragma is a no-op without OpenMP
#pragma omp parallel for
    for (int i = 0; i < nSize; i++)
        arr[i] = val;
    return arr;
}
//// ----------------------------------------------------------------- ////
void test2() {
using namespace Utility;
using namespace std;
using namespace ml;
typedef double T;
Timer<float> timer;
timer.start();
Network<T>* network = new Network<T>();
ILayer<T>* l1 = new Layer<T>(100);
ILayer<T>* l2 = new Layer<T>(200);
ILayer<T>* l3 = new Layer<T>(500);
ILayer<T>* l4 = new Layer<T>(10);
l1->setName("L1");
l2->setName("L2");
l3->setName("L3");
l4->setName("L4");
network->setInputLayer(l1);
network->connect(l1, l2);
network->connect(l2, l3);
network->connect(l3, l4);
network->setOutputLayer(l4);
network->init();
ml::Mat<T> samples(100, 100, 1);
ml::Mat<T> nominals(1, 10, 0);
network->train(samples, nominals);
timer.stop();
cout << timer.getTime() << endl;
}
void test3() {
using namespace std;
using namespace ml;
typedef int T;
Timer<float> timer;
timer.start();
Network<T>* aSubNet = new Network<T>();//.. = something else
aSubNet->setInputLayer(new Layer<T>(50));
aSubNet->connect(aSubNet->getInputLayer(), new Layer<T>(1000));
aSubNet->setOutputLayer(aSubNet->getInputLayer()->siblings[0]);
aSubNet->setName("SubNet 1");
Network<T>* network = new Network<T>();
ILayer<T>* l1 = new Layer<T>(100);
ILayer<T>* l2 = new Layer<T>(200);
ILayer<T>* l3 = new Layer<T>(500);
ILayer<T>* l4 = aSubNet;
l1->setName("L1");
l2->setName("L2");
l3->setName("L3");
l4->setName("L4");
network->setInputLayer(l1);
network->connect(l1, l2);
network->connect(l2, l3);
network->connect(l3, l4);
network->connect(l3, l2); // Sigmoid(l1 * W1 + l3 * W4) -> L2 output
network->connect(l4, l2);
network->setOutputLayer(l4);
network->init();
ml::Mat<T> input(1, 100, 1);
network->feed(input);
/*
network->init();
for sample in samples:
out = network->feed(sample);
// todo: back prop out into network
*/
timer.stop();
cout << timer.getTime() << endl;
}
// Smoke test for a deliberately tangled topology: a parent network with a
// subnet node, cycles back into l2, and a subnet whose middle node IS the
// parent's l2. The subnet interior is wired AFTER the parent has already
// connected through it — that ordering is the point of the test; do not
// reorder these calls.
void test_crazy_network_1() {
using namespace std;
using namespace ml;
typedef int T;
Timer<float> timer;
timer.start();
// Defining network
Network<T>* network = new Network<T>();
ILayer<T>* l1 = new Layer<T>(100, "L1");
ILayer<T>* l2 = new Layer<T>(200, "L2");
ILayer<T>* l3 = new Layer<T>(500, "L3");
// Defining subnet, middle node is actually l2. sorta recurrent
// (declared empty here; its interior is filled in further below)
Network<T>* aSubNet = new Network<T>();//.. = something else
// Defining rest of network
ILayer<T>* l4 = aSubNet; // the subnet participates as an ordinary layer
ILayer<T>* l5 = new Layer<T>(2, "L5");
l4->setName("N4");
network->setInputLayer(l1);
network->connect(l1, l2);
network->connect(l2, l3);
network->connect(l3, l4);
network->connect(l3, l2); // note: circularity back to l2
network->connect(l4, l2); // here again as well
network->connect(l4, l5); // note: connecting subnet arbitrarily to another layer..
network->setOutputLayer(l5);
// Proper way to finish defining a subnet after completely defining which
// network owns the recurrent layers (l2 is owned by parent network..)
{
ILayer<T>* s1 = new Layer<T>(100, "S1");
ILayer<T>* s3 = new Layer<T>(5000, "S3");
aSubNet->setInputLayer(s1);
aSubNet->connect(s1, l2); // subnet's middle node is the parent's l2
aSubNet->connect(l2, s3);
aSubNet->setOutputLayer(s3);
aSubNet->setName("SubNet 1"); // note: replaces the "N4" name set earlier
}
// Finish defining parent network and init.. feed.. train..
network->init();
ml::Mat<T> input(1, 100, 1); // input vec must be the same size as the input layer's size
network->feed(input);
ml::Mat<T> samples(100, 100, 1);
ml::Mat<T> nominals(1, 2, 0);
// network->train(samples, nominals);
timer.stop();
cout << timer.getTime() << endl;
}
// Entry point: runs each construction/feed smoke test in sequence.
int main() {
//test1(); // disabled — test1 is not defined in this translation unit
test2();
test3();
test_crazy_network_1();
return 0;
}
//// ----------------------------------------------------------------- ////