nnutils.py
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division

import collections.abc

import numpy as np
import tensorflow as tf


def _is_sequence(seq):
    # True for list/tuple-like containers, but not for plain strings.
    # Uses `collections.abc.Sequence` and `str` so this runs on Python 3
    # (the original used `collections.Sequence` and the Python 2-only
    # `basestring`).
    return (isinstance(seq, collections.abc.Sequence) and
            not isinstance(seq, str))

def linear(args, output_size, bias, bias_start=0.0, scope=None):
    """Linear map: concat(args) * W, plus an optional bias term.

    Args:
        args: a 2D Tensor or a list of 2D Tensors, each [batch, n].
        output_size: int, second dimension of the result.
        bias: whether to add a learned bias vector.
        bias_start: initial value for the bias vector.
        scope: variable scope for the created parameters (default "Linear").

    Returns:
        A 2D Tensor with shape [batch, output_size].
    """
    if args is None or (_is_sequence(args) and not args):
        raise ValueError("`args` must be specified")
    if not _is_sequence(args):
        args = [args]

    # Calculate the total size of the arguments on dimension 1.
    total_arg_size = 0
    shapes = [a.get_shape().as_list() for a in args]
    for shape in shapes:
        if len(shape) != 2:
            raise ValueError("Linear is expecting 2D arguments: %s"
                             % str(shapes))
        if not shape[1]:
            raise ValueError("Linear expects shape[1] of all arguments "
                             "to be known: %s" % str(shapes))
        total_arg_size += shape[1]

    # Now the computation.
    with tf.variable_scope(scope or "Linear"):
        matrix = tf.get_variable("Matrix", [total_arg_size, output_size])
        if len(args) == 1:
            res = tf.matmul(args[0], matrix)
        else:
            # tf.concat takes (values, axis); the original used the
            # pre-TF-1.0 argument order tf.concat(1, args).
            res = tf.matmul(tf.concat(args, 1), matrix)
        if not bias:
            return res
        bias_term = tf.get_variable(
            "Bias", [output_size],
            initializer=tf.constant_initializer(bias_start))
        return res + bias_term
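
# Usage sketch (illustrative, not part of the original module): `linear`
# concatenates its 2D inputs along axis 1 and applies a single learned
# affine map. With two inputs of widths 16 and 32 and output_size=8, the
# variable "Matrix" has shape [48, 8]. Assumes TF 1.x graph mode; the
# names below are made up.
#
#   x = tf.placeholder(tf.float32, [None, 16])
#   h = tf.placeholder(tf.float32, [None, 32])
#   out = linear([x, h], output_size=8, bias=True, scope="proj")
#   # out is a [batch, 8] Tensor; calling linear again with scope="proj"
#   # would need tf.variable_scope(..., reuse=True) to share parameters.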

def same_shape(s1, s2):
    """Compares two shapes given as Tensors, numpy arrays, or sequences."""
    def _as_tuple(s):
        # Tensors contribute their static shape; ndarrays their array shape;
        # anything else is treated as a plain sequence of dimensions.
        if isinstance(s, tf.Tensor):
            return tuple(s.get_shape().as_list())
        if isinstance(s, np.ndarray):
            return s.shape
        return tuple(s)
    return _as_tuple(s1) == _as_tuple(s2)
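
if __name__ == "__main__":
    # Minimal smoke test (added for illustration; not part of the original
    # file). Assumes a TF 1.x runtime where placeholders and sessions are
    # available.
    x = tf.placeholder(tf.float32, [None, 3])
    h = tf.placeholder(tf.float32, [None, 5])
    # Concatenating x and h gives 8 input features, so "Matrix" is [8, 4].
    out = linear([x, h], output_size=4, bias=True, scope="demo")
    print(same_shape(out, (None, 4)))  # True: static shapes match
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        batch = sess.run(out, feed_dict={x: np.ones((2, 3), np.float32),
                                         h: np.ones((2, 5), np.float32)})
        print(batch.shape)  # (2, 4)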