-
Notifications
You must be signed in to change notification settings - Fork 2
/
NetworkArchitectures.cpp
97 lines (90 loc) · 4.35 KB
/
NetworkArchitectures.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
#include "NetworkArchitectures.h"
// DeepCNet(l, k): l+1 levels with (level+1)*k filters each. The first level
// uses a size-3 filter, later levels size 2; every level except the last is
// followed by size-3/stride-2 max-pooling. Dropout ramps linearly from 0 at
// the first level up to p at the last.
DeepCNet::DeepCNet(int dimension, int l, int k, ActivationFunction fn,
                   int nInputFeatures, int nClasses, float p, int cudaDevice,
                   int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level <= l; ++level) {
    const bool last = (level == l);
    const int filterSize = (level == 0) ? 3 : 2;
    const int poolSize = last ? 1 : 3;  // 1x1 pooling on the final level
    const int poolStride = last ? 1 : 2;
    const float dropout = p * level * 1.0f / l;  // linear ramp 0 -> p
    addLeNetLayerMP((level + 1) * k, filterSize, 1, poolSize, poolStride, fn,
                    dropout);
  }
  addSoftmaxLayer();
}
// DeepC2(l, k): like DeepCNet but with a size-2 filter at every level,
// including the first. Each of the l+1 levels has (level+1)*k filters;
// all levels but the last add size-3/stride-2 max-pooling, and dropout
// ramps linearly from 0 up to p.
DeepC2::DeepC2(int dimension, int l, int k, ActivationFunction fn,
               int nInputFeatures, int nClasses, float p, int cudaDevice,
               int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level <= l; ++level) {
    const bool last = (level == l);
    const float dropout = p * level * 1.0f / l;  // linear ramp 0 -> p
    addLeNetLayerMP((level + 1) * k, 2, 1, last ? 1 : 3, last ? 1 : 2, fn,
                    dropout);
  }
  addSoftmaxLayer();
}
// DeepCNiN(l, k): l+1 levels; each level is a 2x2 convolution (with
// size-3/stride-2 max-pooling on all but the last level) followed by a
// 1x1 network-in-network layer at the same width. Dropout ramps linearly
// from 0 at the first level up to p at the last.
//
// @param dimension       spatial dimensionality of the input
// @param l               number of pooled levels (network has l+1 levels)
// @param k               filter-count multiplier; level i has (i+1)*k filters
// @param fn              activation function applied in each layer
// @param nInputFeatures  input feature planes
// @param nClasses        output classes for the softmax layer
// @param p               peak dropout probability (reached at the top level)
// @param cudaDevice      CUDA device index forwarded to SparseConvNet
// @param nTop            forwarded to SparseConvNet (top-n reporting)
DeepCNiN::DeepCNiN(int dimension, int l, int k, ActivationFunction fn,
                   int nInputFeatures, int nClasses, float p, int cudaDevice,
                   int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int i = 0; i <= l; i++) {
    // NOTE(review): the original wrote (i == 0) ? 2 : 2 — both arms were 2,
    // so the filter size is simply 2 at every level; the dead branch is
    // removed here.
    const float dropout = p * i * 1.0f / l;  // NaN if l == 0 — presumably
                                             // l >= 1; confirm with callers
    addLeNetLayerMP((i + 1) * k, 2, 1, (i < l) ? 3 : 1, (i < l) ? 2 : 1, fn,
                    dropout);
    // 1x1 "network in network" layer at the same width and dropout rate.
    addLeNetLayerMP((i + 1) * k, 1, 1, 1, 1, fn, dropout);
  }
  addSoftmaxLayer();
}
// DeepC2C2(l, k): l levels, each a pair of 2x2 convolutions where only the
// second one pools (size 3, stride 2), followed by a final non-pooling pair
// (a 2x2 conv and a 1x1 conv) at width (l+1)*k. Dropout ramps linearly
// toward p, with the two final layers using p itself.
DeepC2C2::DeepC2C2(int dimension, int l, int k, ActivationFunction fn,
                   int nInputFeatures, int nClasses, float p, int cudaDevice,
                   int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level < l; ++level) {
    const int width = (level + 1) * k;
    const float dropout = p * level * 1.0f / l;  // linear ramp toward p
    addLeNetLayerMP(width, 2, 1, 1, 1, fn, dropout);  // conv only
    addLeNetLayerMP(width, 2, 1, 3, 2, fn, dropout);  // conv + max-pool
  }
  addLeNetLayerMP((l + 1) * k, 2, 1, 1, 1, fn, p);
  addLeNetLayerMP((l + 1) * k, 1, 1, 1, 1, fn, p);
  addSoftmaxLayer();
}
// POFMP network: l levels of 2x2 convolution, each followed by fractional
// max-pooling with shrink factor fmpShrink (via addLeNetLayerPOFMP —
// presumably pseudorandom/overlapping FMP; confirm against that method).
// Capped by a non-pooling 2x2 conv and a 1x1 layer, then softmax. Dropout
// ramps linearly from 0 up to p.
POFMPSparseConvNet::POFMPSparseConvNet(int dimension, int l, int k,
                                       float fmpShrink, ActivationFunction fn,
                                       int nInputFeatures, int nClasses,
                                       float p, int cudaDevice, int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level < l; ++level)
    addLeNetLayerPOFMP(k * (level + 1), 2, 1, 2, fmpShrink, fn,
                       p * level / (l + 1));
  // Final non-pooling layers: a 2x2 conv, then a 1x1 layer at full dropout.
  addLeNetLayerMP(k * (l + 1), 2, 1, 1, 1, fn, p * l / (l + 1));
  addLeNetLayerMP(k * (l + 2), 1, 1, 1, 1, fn, p);
  addSoftmaxLayer();
}
// ROFMP network: identical structure to POFMPSparseConvNet but each pooled
// level uses addLeNetLayerROFMP (presumably random/overlapping FMP; confirm
// against that method). l FMP levels, then a 2x2 conv and a 1x1 layer
// without pooling, then softmax; dropout ramps linearly from 0 up to p.
ROFMPSparseConvNet::ROFMPSparseConvNet(int dimension, int l, int k,
                                       float fmpShrink, ActivationFunction fn,
                                       int nInputFeatures, int nClasses,
                                       float p, int cudaDevice, int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level < l; ++level)
    addLeNetLayerROFMP(k * (level + 1), 2, 1, 2, fmpShrink, fn,
                       p * level / (l + 1));
  // Final non-pooling layers: a 2x2 conv, then a 1x1 layer at full dropout.
  addLeNetLayerMP(k * (l + 1), 2, 1, 1, 1, fn, p * l / (l + 1));
  addLeNetLayerMP(k * (l + 2), 1, 1, 1, 1, fn, p);
  addSoftmaxLayer();
}
// PDFMP network: identical structure to POFMPSparseConvNet but each pooled
// level uses addLeNetLayerPDFMP (presumably pseudorandom/disjoint FMP;
// confirm against that method). l FMP levels, then a 2x2 conv and a 1x1
// layer without pooling, then softmax; dropout ramps linearly from 0 to p.
PDFMPSparseConvNet::PDFMPSparseConvNet(int dimension, int l, int k,
                                       float fmpShrink, ActivationFunction fn,
                                       int nInputFeatures, int nClasses,
                                       float p, int cudaDevice, int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level < l; ++level)
    addLeNetLayerPDFMP(k * (level + 1), 2, 1, 2, fmpShrink, fn,
                       p * level / (l + 1));
  // Final non-pooling layers: a 2x2 conv, then a 1x1 layer at full dropout.
  addLeNetLayerMP(k * (l + 1), 2, 1, 1, 1, fn, p * l / (l + 1));
  addLeNetLayerMP(k * (l + 2), 1, 1, 1, 1, fn, p);
  addSoftmaxLayer();
}
// RDFMP network: identical structure to POFMPSparseConvNet but each pooled
// level uses addLeNetLayerRDFMP (presumably random/disjoint FMP; confirm
// against that method). l FMP levels, then a 2x2 conv and a 1x1 layer
// without pooling, then softmax; dropout ramps linearly from 0 up to p.
RDFMPSparseConvNet::RDFMPSparseConvNet(int dimension, int l, int k,
                                       float fmpShrink, ActivationFunction fn,
                                       int nInputFeatures, int nClasses,
                                       float p, int cudaDevice, int nTop)
    : SparseConvNet(dimension, nInputFeatures, nClasses, cudaDevice, nTop) {
  for (int level = 0; level < l; ++level)
    addLeNetLayerRDFMP(k * (level + 1), 2, 1, 2, fmpShrink, fn,
                       p * level / (l + 1));
  // Final non-pooling layers: a 2x2 conv, then a 1x1 layer at full dropout.
  addLeNetLayerMP(k * (l + 1), 2, 1, 1, 1, fn, p * l / (l + 1));
  addLeNetLayerMP(k * (l + 2), 1, 1, 1, 1, fn, p);
  addSoftmaxLayer();
}