import numpy as np
import pandas as pd
import random
import math

# Global boolean connectivity matrix: all_synapses[sid][tid] is True once a
# synapse from source gid `sid` to target gid `tid` has been created.
all_synapses = None

def init_connectors(total_cells):
    global all_synapses
    all_synapses = np.zeros([total_cells, total_cells], dtype=bool)

##############################################################################
############################## CONNECT CELLS #################################
def one_to_one(source, target):
    sid = source.node_id
    tid = target.node_id
    if sid == tid:
        #print("connecting cell {} to {}".format(sid,tid))
        tmp_nsyn = 1
    else:
        return None
    return tmp_nsyn

def one_to_one_offset(source, target, offset=0):
    sid = source.node_id
    tid = target.node_id - offset
    if sid == tid:
        #print("connecting cell {} to {}".format(sid,tid))
        tmp_nsyn = 1
    else:
        #print("NOT connecting cell {} to {}".format(sid,tid))
        return None
    return tmp_nsyn

def syn_dist_delay_feng(source, target):
    #if not dist_constraint:
    #    return 0.1
    dt = 0.05
    min_delay = 0.8    # minimum delay, ms
    #maxdis=2.425      # mm, sqrt((1.4)^2+(1.4)^2+(1.4)^2)
    #x = float(x_end - x_start)#/1000
    #y = float(y_end - y_start)#/1000
    #z = float(z_end - z_start)#/1000
    max_dist = 2.425   # mm; np.sqrt(x**2 + y**2 + z**2)
    max_delay = 2.425  # maximum delay, ms
    x_ind, y_ind, z_ind = 0, 1, 2
    dx = target['positions'][x_ind] - source['positions'][x_ind]
    dy = target['positions'][y_ind] - source['positions'][y_ind]
    dz = target['positions'][z_ind] - source['positions'][z_ind]
    dist = np.sqrt(dx**2 + dy**2 + dz**2) / 1000  # convert to mm
    del_fluc = np.random.uniform(-0.1, 0.1)
    delay = (dist / max_dist) * max_delay + min_delay + del_fluc + dt
    return delay

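# A worked example of the delay formula above (assuming positions are given in um):
# two cells 1000 um apart give dist = 1.0 mm, so
# delay = (1.0/2.425)*2.425 + 0.8 + 0.05 +/- 0.1, i.e. roughly 1.85 +/- 0.1 ms.
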
def get_target_sec_id(source, target):
    # this simplified target selector could be replaced with something more
    # robust using a lookup table for more complicated cell types
    if source['pop_name'] == 'PyrA' or source['pop_name'] == 'PyrC':
        return 1  # Target Dendrites
    elif source['pop_name'] == 'PV':
        return 0  # Target Soma
    elif source['pop_name'] == 'SOM':
        return 0  # Target Soma
    elif source['pop_name'] == 'CR':
        if target['pop_name'] == 'PyrA' or target['pop_name'] == 'PyrC':
            return 0  # Target Soma
        else:
            return 1  # Target Dendrites
    elif source['model_type'] == 'virtual':
        return 1  # Target Dendrites
    else:  # We really don't want a default case so we can catch errors
        #return 0 # Target Soma by default
        import pdb; pdb.set_trace()

def syn_dist_delay_feng_section(source, target, sec_id=None, sec_x=0.9):
    if sec_id is None:  # allows for overriding
        sec_id = get_target_sec_id(source, target)
    return syn_dist_delay_feng(source, target), sec_id, sec_x

def syn_uniform_delay_section(source, target, sec_id=None, sec_x=0.9, mean=0.5, std=1):
    if sec_id is None:  # allows for overriding
        sec_id = get_target_sec_id(source, target)
    # note: np.random.uniform(low, high) -- `mean` and `std` serve here as the
    # lower and upper bounds of the uniform draw
    return np.random.uniform(mean, std), sec_id, sec_x

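# Sketch of how the section-returning rules above are typically attached as edge
# properties. This assumes BMTK's ConnectionMap.add_properties; the edge set `conn`
# returned by a net.add_edges(...) call, the population filters and the dtypes are
# illustrative, not defined in this file:
#
#   conn = net.add_edges(source={'pop_name': 'PyrA'}, target={'pop_name': 'PV'}, ...)
#   conn.add_properties(names=['delay', 'sec_id', 'sec_x'],
#                       rule=syn_dist_delay_feng_section,
#                       dtypes=[float, np.int32, float])
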
def syn_percent(source, target, p, track_list=None):
    """
    track_list: supply a list to append and track the synapses with
    """
    global all_synapses
    sid = source.node_id
    tid = target.node_id
    # No autapses
    if sid == tid:
        return None
    # Do not add synapses if they already exist, we don't want duplicates
    #if ((all_synapses.source_gid == sid) & (all_synapses.target_gid == tid)).any():
    #    return None
    if all_synapses[sid][tid]:
        return None
    if random.random() < p:
        #all_synapses = all_synapses.append({'source_gid':sid,'target_gid':tid},ignore_index=True)
        all_synapses[sid][tid] = True
        # only track synapses that may have a recurrent connection; this speeds up build time considerably
        if track_list is not None:
            track_list.append({'source_gid': source['node_id'], 'target_gid': target['node_id']})
        return 1
    else:
        return 0

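# Sketch of how syn_percent is meant to be used as a BMTK connection rule
# (population names, N_CELLS and the synapse parameters below are hypothetical):
#
#   from bmtk.builder import NetworkBuilder
#   net = NetworkBuilder('example')
#   ...  # net.add_nodes(...) calls
#   init_connectors(total_cells=N_CELLS)   # N_CELLS = total number of cells added
#   net.add_edges(source={'pop_name': 'PyrA'}, target={'pop_name': 'PV'},
#                 connection_rule=syn_percent,
#                 connection_params={'p': 0.05},
#                 syn_weight=1.0, delay=0.8,
#                 dynamics_params='AMPA_ExcToInh.json',
#                 model_template='exp2syn')
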
def points_in_cylinder(pt1, pt2, r, q):
    #https://stackoverflow.com/questions/47932955/how-to-check-if-a-3d-point-is-inside-a-cylinder
    # Returns a boolean mask over the rows of q: True where the point lies inside
    # the cylinder of radius r whose axis runs from pt1 to pt2.
    vec = pt2 - pt1
    const = r * np.linalg.norm(vec)
    c1 = np.dot(q - pt1, vec) >= 0                                 # not behind the pt1 end cap
    c2 = np.dot(q - pt2, vec) <= 0                                 # not beyond the pt2 end cap
    c3 = np.linalg.norm(np.cross(q - pt1, vec), axis=1) <= const   # within radius r of the axis
    return c1 & c2 & c3

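# A minimal, self-contained check of the helper above (values are illustrative only):
#
#   pt1 = np.array([0.0, 0.0, 0.0])             # axis start
#   pt2 = np.array([0.0, 0.0, 300.0])           # axis end
#   pts = np.array([[10.0, 0.0, 50.0],          # inside the cylinder
#                   [500.0, 0.0, 50.0]])        # outside (too far from the axis)
#   points_in_cylinder(pt1, pt2, r=100, q=pts)  # -> array([ True, False])
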
def syn_percent_o2a(source, targets, p, track_list=None, no_recip=False, autapses_allowed=False,
                    angle_dist=False, min_dist=0, max_dist=300, angle_dist_radius=100):
    """
    One-to-all connector for increased build speed.
    track_list: supply a list to append and track the synapses with.
    Returns a list of len(targets) where the value is the number of synapses at index=cellid.
    If no_recip is set to True, any connection that has been made previously and is reciprocal
    won't be considered.
    """
    original_p = p
    global all_synapses
    sid = source.node_id
    tids = np.array([target.node_id for target in targets])
    # List of synapses that will be returned, initialized to 0 synapses
    syns = np.array([0 for _ in range(len(targets))])
    if not len(tids):
        return syns
    # Get all existing targets for that source that can't be targeted here
    existing = all_synapses[sid]
    existing_list = list(np.where(existing)[0])
    # remove existing synapses from available list
    available = tids.copy()
    available = available[~np.isin(available, existing_list)]
    if not autapses_allowed:
        available = available[~np.isin(available, sid)]  # remove self from possible targets
    if no_recip:
        recur = [i['source_gid'] for i in track_list if i['target_gid'] == sid]
        available = available[~np.isin(available, recur)]
    if source.get('positions') is not None:
        src_pos = np.array(source['positions'])
        trg_pos = np.array([target['positions'] for target in targets])
        euclid_dist = np.linalg.norm(trg_pos - src_pos, axis=1)
        euclid_mask_dist = np.array((euclid_dist < max_dist) & (euclid_dist > min_dist))
        euclid_dist_available_ind = np.argwhere(euclid_mask_dist).T[0]  # Convert to indices
        euclid_dist_available = tids[euclid_dist_available_ind]
        default_dist_available = euclid_dist_available
        if angle_dist:
            # Checks if points are inside a cylinder aligned with the source cell's direction
            src_angle_x = np.array(source['rotation_angle_zaxis'])
            src_angle_y = np.array(source['rotation_angle_yaxis'])
            vec_pos = np.array([np.cos(src_angle_x), np.sin(src_angle_y), np.sin(src_angle_x)])
            pt1 = src_pos + vec_pos * min_dist
            pt2 = src_pos + vec_pos * max_dist  # Furthest point (max dist away from position of cell)
            mask_dist = points_in_cylinder(pt1, pt2, angle_dist_radius, trg_pos)
            angle_dist_available_ind = np.argwhere(mask_dist).T[0]  # Convert to indices
            angle_dist_available = tids[angle_dist_available_ind]
            default_dist_available = angle_dist_available
        # We now want to reduce the number available by distance
        available = available[np.isin(available, default_dist_available)]
    # of those remaining we want p% chosen from all within the distance, regardless of angle
    n = int(len(available) * p)
    extra = 1 if random.random() < (p * 100 - math.floor(p * 100)) else 0
    n = n + extra  # This will account for fractional percentages
    if len(available) < n:
        n = len(available)
    chosen = np.random.choice(available, size=n, replace=False)
    mask = np.isin(tids, chosen)
    syns[mask] = 1
    # Add to lists
    #new_syns = pd.DataFrame(chosen,columns=['target_gid'])
    #new_syns['source_gid'] = sid
    all_synapses[sid][chosen] = True
    if track_list is not None:
        #track_list = track_list.append(new_syns,ignore_index=True)
        for target in chosen:
            track_list.append({'source_gid': sid, 'target_gid': target})
    # any index selected will be set to 1 and returned
    return syns

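# Sketch of wiring the one-to-all rule into a build script. This assumes BMTK's
# `iterator='one_to_all'` option, which hands the full target list to the rule in a
# single call; population names and parameter values are hypothetical:
#
#   net.add_edges(source={'pop_name': 'PyrA'}, target={'pop_name': 'PyrC'},
#                 iterator='one_to_all',
#                 connection_rule=syn_percent_o2a,
#                 connection_params={'p': 0.02, 'angle_dist': True,
#                                    'max_dist': 300, 'angle_dist_radius': 100},
#                 ...)
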
def syn_percent_o2a_old(source, targets, p, track_list=None, no_recip=False, angle_dist=False,
                        min_dist=0, max_dist=300, angle_dist_radius=100, warn=False):
    """
    One-to-all connector for increased build speed (older version).
    track_list: supply a list to append and track the synapses with.
    Returns a list of len(targets) where the value is the number of synapses at index=cellid.
    If no_recip is set to True, any connection that has been made previously and is reciprocal
    won't be considered.
    """
    original_p = p
    global all_synapses
    sid = source.node_id
    tids = np.array([target.node_id for target in targets])
    # List of synapses that will be returned, initialized to 0 synapses
    syns = np.array([0 for _ in range(len(targets))])
    # Get all existing targets for that source that can't be targeted here
    existing = all_synapses[sid]
    existing_list = list(np.where(existing)[0])
    # remove existing synapses from available list
    available = tids.copy()
    available = available[~np.isin(available, existing_list)]
    if no_recip:
        recur = [i['source_gid'] for i in track_list if i['target_gid'] == sid]
        available = available[~np.isin(available, recur)]
    dist = None
    mask_dist = None
    if source.get('positions') is not None:
        src_pos = np.array(source['positions'])
        trg_pos = np.array([target['positions'] for target in targets])
        #if angle_dist: #https://github.com/latimerb/SPWR_BMTK2/blob/master/build_network.py#L148-L176
        #    """
        #    'finding the perpendicular distance from a three dimensional vector ... the goal was simply
        #    to calculate the perpendicular distance of the target cell from the source cell's direction vector...
        #    the Euclidean distance would be the hypotenuse of that right triangle so the
        #    perpendicular distance should be the opposite side.
        #    the way I was thinking about it was to imagine a cylinder with its center around the [directional] vector
        #    ... and only cells that fall in the cylinder are eligible for connection' - Per Ben
        #    """
        #    vec_pos = np.array([np.cos(src_angle_x), np.sin(src_angle_y), np.sin(src_angle_x)])
        #    dist = np.linalg.norm(np.cross((trg_pos - src_pos), (trg_pos - vec_pos)),axis=1) / np.linalg.norm((vec_pos - src_pos))
        if angle_dist:
            # Checks if points are inside a cylinder
            src_angle_x = np.array(source['rotation_angle_zaxis'])
            src_angle_y = np.array(source['rotation_angle_yaxis'])
            vec_pos = np.array([np.cos(src_angle_x), np.sin(src_angle_y), np.sin(src_angle_x)])
            pt1 = src_pos + vec_pos * min_dist
            pt2 = src_pos + vec_pos * max_dist  # Furthest point (max dist away from position of cell)
            mask_dist = points_in_cylinder(pt1, pt2, angle_dist_radius, trg_pos)
        else:
            dist = np.linalg.norm(trg_pos - src_pos, axis=1)
            mask_dist = np.array((dist < max_dist) & (dist > min_dist))
        # Since we cut down on the number of available cells due to distance constraints we need to scale up p
        avg_connected = p * len(targets)
        # new p
        num_available = len(np.where(mask_dist == True)[0])
        if num_available:
            p = avg_connected / num_available
        else:
            p = 1.1
        if p > 1:
            p = 1
            if not angle_dist and warn:
                sorted_dist = np.sort(dist)
                minimum_max_dist = sorted_dist[int(avg_connected)]
                print("Warning: distance constraint (max_dist:" + str(max_dist) + ") between gid " + str(sid) + " and target gids " +
                      str(min(tids)) + "-" + str(max(tids)) + " prevented " + str(original_p * 100) + "% overall connectivity. " +
                      "To achieve this connectivity, max_dist would have needed to be greater than " + str(minimum_max_dist))
    # of those remaining we want p% chosen
    n = int(len(tids) * p)
    extra = 1 if random.random() < (p * 100 - math.floor(p * 100)) else 0
    n = n + extra  # This will account for fractional percentages
    if len(available) < n:
        n = len(available)
    chosen = np.random.choice(available, size=n, replace=False)
    mask = np.isin(tids, chosen)
    if mask_dist is not None:
        mask = mask & mask_dist
        chosen = np.where(mask == True)[0]
    syns[mask] = 1
    # Add to lists
    new_syns = pd.DataFrame(chosen, columns=['target_gid'])
    new_syns['source_gid'] = sid
    all_synapses[sid][chosen] = True
    if track_list is not None:
        #track_list = track_list.append(new_syns,ignore_index=True)
        for target in chosen:
            track_list.append({'source_gid': sid, 'target_gid': target})
    # any index selected will be set to 1 and returned
    return syns

def recurrent_connector(source, target, p, all_edges=[], min_syn=1, max_syn=1):
    """
    General logic:
    1. Given a *potential* source and target
    2. Look through all edges currently made
    3. If any existing edge has
        a. the current source as its target, and
        b. the current target as its source
    4. Return the number of synapses for this connection (with probability p), 0 otherwise (no connection)
    """
    global all_synapses
    sid = source.node_id
    tid = target.node_id
    # Do not add synapses if they already exist, we don't want duplicates
    #if ((all_synapses.source_gid == sid) & (all_synapses.target_gid == tid)).any():
    #    return None
    if all_synapses[sid][tid]:
        return None
    for e in all_edges:  # should probably make this a pandas df to speed up building... and use .any() to search
        if sid == e['target_gid'] and tid == e['source_gid']:
            #print('found recurrent')
            if random.random() < p:
                #print('--------------connecting')
                #all_synapses = all_synapses.append({'source_gid':sid,'target_gid':tid},ignore_index=True)
                all_synapses[sid][tid] = True
                return random.randint(min_syn, max_syn)
            else:
                return 0
    return 0

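# Sketch of the intended two-pass pattern (names are illustrative, not from this file):
# forward edges are made with syn_percent while being recorded in a track list, and
# recurrent_connector is then given that list so only reciprocal pairs are eligible.
# This assumes the forward edge set is built before the recurrent rule runs.
#
#   pyr_to_pv = []
#   net.add_edges(source={'pop_name': 'PyrA'}, target={'pop_name': 'PV'},
#                 connection_rule=syn_percent,
#                 connection_params={'p': 0.1, 'track_list': pyr_to_pv}, ...)
#   net.add_edges(source={'pop_name': 'PV'}, target={'pop_name': 'PyrA'},
#                 connection_rule=recurrent_connector,
#                 connection_params={'p': 0.5, 'all_edges': pyr_to_pv}, ...)
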
def recurrent_connector_o2a(source, targets, p, all_edges=[], min_syn=1, max_syn=1):
    global all_synapses
    sid = source.node_id
    tids = np.array([target.node_id for target in targets])
    # List of synapses that will be returned, initialized to 0 synapses
    syns = np.array([0 for _ in range(len(targets))])
    #existing = all_synapses[all_synapses.source_gid == sid]
    #existing_list = existing[existing.target_gid.isin(tids)].target_gid.tolist()
    existing = all_synapses[sid]
    existing_list = list(np.where(existing)[0])
    # remove existing synapses from available list
    available = tids.copy()
    available = available[~np.isin(available, existing_list)]
    # remove any connection that is not in the all_edges list from the 'available' list
    recur = [i['source_gid'] for i in all_edges if i['target_gid'] == sid]
    available = available[np.isin(available, recur)]
    #import pdb;pdb.set_trace()
    # of those remaining we want p% chosen
    n = int(len(available) * p)
    extra = 1 if random.random() < (p * 100 - math.floor(p * 100)) else 0
    n = n + extra  # This will account for fractional percentages
    chosen = np.random.choice(available, size=n, replace=False)
    mask = np.isin(tids, chosen)
    syns[mask] = 1
    # Add to lists
    #new_syns = pd.DataFrame(chosen,columns=['target_gid'])
    #new_syns['source_gid'] = sid
    #all_synapses = all_synapses.append(new_syns,ignore_index=True)
    all_synapses[sid][chosen] = True
    # any index selected will be set to 1 and returned
    return syns

def background_tone_connector(source, target, offset=0):
    sid = source.node_id
    tid = target.node_id
    offset_tid = tid - offset
    if sid == offset_tid:
        #print(sid)
        return 1
    else:
        return 0

def rand_shock_connector(source, target, offset, prob):
    # shock uses a regular gaba synapse and needs to be stronger, so more synapses
    # are placed on the connection when it is made
    sid = source.node_id
    tid = target.node_id
    offset_tid = tid - offset
    if np.random.uniform() > prob:
        return 0
    else:
        if sid == offset_tid:
            return 2
        return 0