# functions.py
import numpy as np
from scipy.special import softmax

# np.exp(-x) overflows for large |x|; the sigmoid still saturates correctly,
# so the overflow warning is suppressed
np.seterr(over='ignore')
# activation functions
def identity(x):
    return x

def sigmoid(x):
    return 1 / (1 + np.exp(-x))
# derivatives of the activation functions
def identity_deriv(x):
    return np.ones(x.shape)

def sigmoid_deriv(x):
    z = sigmoid(x)
    return z * (1 - z)
# error functions
def sum_of_squares(y, t):
    return 0.5 * np.sum(np.power(y - t, 2))

def cross_entropy(y, t, epsilon=1e-15):
    # clip predictions away from 0 and 1 to avoid log(0)
    y = np.clip(y, epsilon, 1. - epsilon)
    return -np.sum(t * np.log(y))

def cross_entropy_softmax(y, t):
    # softmax over axis 0: each column holds one sample
    softmax_y = softmax(y, axis=0)
    return cross_entropy(softmax_y, t)
# derivatives of the error functions
def sum_of_squares_deriv(y, t):
    return y - t

# to be verified
def cross_entropy_deriv(y, t):
    return -t / y

def cross_entropy_softmax_deriv(y, t):
    # for one-hot t, the gradient of cross-entropy composed with softmax
    # simplifies to softmax(y) - t
    softmax_y = softmax(y, axis=0)
    return softmax_y - t
# a function and its derivative share the same index across these lists
activation_functions = [sigmoid, identity]
activation_functions_deriv = [sigmoid_deriv, identity_deriv]
error_functions = [cross_entropy, cross_entropy_softmax, sum_of_squares]
error_functions_deriv = [cross_entropy_deriv, cross_entropy_softmax_deriv, sum_of_squares_deriv]
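
# --- quick self-check: a minimal sketch, not part of the original module ---
# Checks the analytic gradients against central-difference numerical gradients,
# in particular cross_entropy_deriv, which is marked "to be verified" above.
# The column layout (softmax over axis 0) and the sample data below are
# assumptions made for illustration only.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    y = rng.random((3, 1))             # raw scores for one sample (one column)
    t = np.array([[0.], [1.], [0.]])   # one-hot target

    def numerical_grad(f, x, h=1e-6):
        # central-difference approximation of df/dx, one element at a time
        grad = np.zeros_like(x)
        for idx in np.ndindex(x.shape):
            x_plus, x_minus = x.copy(), x.copy()
            x_plus[idx] += h
            x_minus[idx] -= h
            grad[idx] = (f(x_plus) - f(x_minus)) / (2 * h)
        return grad

    # gradient of cross-entropy composed with softmax, w.r.t. the raw scores
    num = numerical_grad(lambda v: cross_entropy_softmax(v, t), y)
    ana = cross_entropy_softmax_deriv(y, t)
    print('cross_entropy_softmax_deriv max error:', np.max(np.abs(num - ana)))

    # gradient of plain cross-entropy, w.r.t. probabilities already in (0, 1),
    # so the epsilon clip inside cross_entropy stays inactive
    p = softmax(y, axis=0)
    num = numerical_grad(lambda v: cross_entropy(v, t), p)
    ana = cross_entropy_deriv(p, t)
    print('cross_entropy_deriv max error:', np.max(np.abs(num - ana)))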