APOP.h
/*
 * APOP.h
 *
 *  Created on: Jul 20, 2016
 *      Author: mason
 */
#ifndef APOP_H_
#define APOP_H_
#include "MyLib.h"
#include "Alphabet.h"
#include "Node.h"
#include "Graph.h"
#include "APParam.h"
// for sparse features
struct APParams {
  public:
    APParam W;          // sparse (averaged perceptron) weight matrix
    PAlphabet elems;    // feature alphabet, maps feature strings to ids
    int nVSize;         // number of sparse features (alphabet size)
    int nDim;           // output dimension

  public:
    APParams() {
        nVSize = 0;
        nDim = 0;
        elems = NULL;
    }

    inline void exportAdaParams(ModelUpdate& ada) {
        ada.addParam(&W);
    }

    inline void initialWeights(int nOSize) {
        if (nVSize == 0) {
            std::cout << "please check the alphabet" << std::endl;
            return;
        }
        nDim = nOSize;
        W.initial(nOSize, nVSize);
    }

    // random initialization
    inline void initial(PAlphabet alpha, int nOSize) {
        elems = alpha;
        nVSize = elems->size();
        initialWeights(nOSize);
    }

    inline int getFeatureId(const string& strFeat) {
        return elems->from_string(strFeat);
    }
};
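
// Usage sketch (illustrative only, not part of the original file): it assumes
// `featAlpha` is an Alphabet already filled with the training feature strings,
// `labelSize` is the number of output labels, `ada` is a ModelUpdate, and
// PAlphabet is a pointer to Alphabet.
//
//   APParams sparse_params;
//   sparse_params.initial(&featAlpha, labelSize);   // W becomes labelSize x |featAlpha|
//   sparse_params.exportAdaParams(ada);             // register W with the updater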
// only the sparse linear node is implemented;
// non-linear transformations are not supported.
struct APNode : Node {
  public:
    APParams* param;
    vector<int> tx;     // ids of the sparse features that fired for this instance

  public:
    APNode() : Node() {
        tx.clear();
        param = NULL;
    }

    inline void setParam(APParams* paramInit) {
        param = paramInit;
    }

    inline void clearValue() {
        Node::clearValue();
        tx.clear();
    }

  public:
    // notice the output
    void forward(Graph *cg, const vector<string>& x) {
        int featId;
        int featSize = x.size();
        for (int idx = 0; idx < featSize; idx++) {
            featId = param->getFeatureId(x[idx]);
            if (featId >= 0) {
                tx.push_back(featId);
            }
        }
        // accumulate the weight entries of all collected feature ids into val
        param->W.value(tx, val, cg->train);
        cg->addNode(this);
    }

    // no output losses
    void backward() {
        //assert(param != NULL);
        param->W.loss(tx, loss);
    }
};
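
// Usage sketch (illustrative only, not part of the original file): it assumes
// `graph` is the Graph being built for one instance, `feats` holds that
// instance's sparse feature strings, and `sparse_params` is an initialized
// APParams as in the sketch above.
//
//   APNode output;
//   output.setParam(&sparse_params);   // share the sparse weights
//   output.forward(&graph, feats);     // val accumulates W entries for the hit features
//   // once losses are set on output.loss, backward() pushes them into W via tx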
#endif /* APOP_H_ */