Skip to content

Commit

Permalink
add save option for params
Browse files Browse the repository at this point in the history
  • Loading branch information
zm711 committed Nov 30, 2023
1 parent 4629214 commit 5a8bb4b
Show file tree
Hide file tree
Showing 2 changed files with 59 additions and 10 deletions.
58 changes: 51 additions & 7 deletions src/spikeanalysis/spike_analysis.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from __future__ import annotations
import json
from typing import Union, Optional

import numpy as np
Expand All @@ -8,7 +9,7 @@
from .stimulus_data import StimulusData
from .analysis_utils import histogram_functions as hf
from .analysis_utils import latency_functions as lf
from .utils import verify_window_format, gaussian_smoothing, NumpyEncoder
from .utils import verify_window_format, gaussian_smoothing, NumpyEncoder, jsonify_parameters


_possible_digital = ("generate_digital_events", "set_trial_groups", "set_stimulus_name")
Expand All @@ -19,16 +20,17 @@
class SpikeAnalysis:
"""Class for spike train analysis utilizing a SpikeData object and a StimulusData object"""

def __init__(self):
def __init__(self, save_parameters: bool = False):
    """Create an empty analysis container.

    Parameters
    ----------
    save_parameters: bool, default: False
        If True, each analysis method records the parameters it was
        called with into ``analysis_parameters.json``.
    """
    self._save_params = save_parameters
    self.events = {}
    self._file_path = None

def __repr__(self):
    """Return a readable summary of public methods and public variables.

    Methods are attribute names present on the instance but not stored in
    ``vars``; dunder and underscore-prefixed names are hidden. Variables
    are instance attributes whose first two characters contain no
    underscore, which hides private state such as ``_save_params``.
    """
    var_methods = dir(self)
    var = list(vars(self).keys())  # get our current variables
    methods = list(set(var_methods) - set(var))
    final_methods = [method for method in methods if "__" not in method and method[0] != "_"]
    # check only the first two characters so names like "z_scores" still show
    final_vars = [current_var for current_var in var if "_" not in current_var[:2]]
    return f"The methods are: {final_methods} Variables are: {final_vars}"

def set_spike_data(self, sp: SpikeData):
Expand Down Expand Up @@ -162,6 +164,10 @@ def get_raw_psth(
"""

if self._save_params:
parameters = {"get_raw_psth": dict(window=window, time_bin_ms=time_bin_ms)}
jsonify_parameters(parameters, self._file_path)

spike_times = self.raw_spike_times
spike_clusters = self.spike_clusters
cluster_ids = self.cluster_ids
Expand Down Expand Up @@ -259,6 +265,18 @@ def get_raw_firing_rate(
except AttributeError:
raise Exception("Run get_raw_psth before running z_score_data")

if self._save_params:
parameters = {
"get_raw_firing_rate": dict(
time_bin_ms=time_bin_ms,
fr_window=fr_window,
mode=mode,
bsl_window=bsl_window,
sm_time_ms=sm_time_ms,
)
}
jsonify_parameters(parameters, self._file_path)

stim_dict = self._get_key_for_stim()
NUM_STIM = self.NUM_STIM

Expand Down Expand Up @@ -356,6 +374,7 @@ def z_score_data(
time_bin_ms: Union[list[float], float],
bsl_window: Union[list, list[list]],
z_window: Union[list, list[list]],
eps: float = 0,
):
"""
z scores data the psth data
Expand All @@ -373,6 +392,8 @@ def z_score_data(
The event window for finding the z scores/time_bin. Either a single
sequence of (start, end) in relation to stim onset at 0 applied for all stim. Or a list
of lists where each stimulus has its own (start, end)
eps: float, default: 0
Value to prevent nans from occurring during z-scoring
Raises
------
Expand All @@ -389,6 +410,10 @@ def z_score_data(
except AttributeError:
raise Exception("Run get_raw_psth before running z_score_data")

if self._save_params:
parameters = {"z_score_data": dict(time_bin_ms=time_bin_ms, bsl_window=bsl_window, z_window=z_window)}
jsonify_parameters(parameters, self._file_path)

stim_dict = self._get_key_for_stim()
NUM_STIM = self.NUM_STIM

Expand Down Expand Up @@ -441,7 +466,8 @@ def z_score_data(
for trial_number, trial in enumerate(tqdm(trial_set)):
bsl_trial = bsl_psth[:, trials == trial, :]
mean_fr = np.mean(np.sum(bsl_trial, axis=2), axis=1) / ((bsl_current[1] - bsl_current[0]))
std_fr = np.std(np.sum(bsl_trial, axis=2), axis=1) / ((bsl_current[1] - bsl_current[0]))
# for future computations may be beneficial to have small eps to std to prevent divide by 0
std_fr = np.std(np.sum(bsl_trial, axis=2), axis=1) / ((bsl_current[1] - bsl_current[0])) + eps
z_trial = z_psth[:, trials == trial, :] / time_bin_current
z_trials = hf.z_score_values(z_trial, mean_fr, std_fr)
z_scores[stim][:, trials == trial, :] = z_trials[:, :, :]
Expand Down Expand Up @@ -471,6 +497,10 @@ def latencies(self, bsl_window: Union[list, list[float]], time_bin_ms: float = 5
"""

if self._save_params:
parameters = {"latencies": dict(bsl_window=bsl_window, time_bin_ms=time_bin_ms, num_shuffles=num_shuffles)}
jsonify_parameters(parameters, self._file_path)

NUM_STIM = self.NUM_STIM
self._latency_time_bin = time_bin_ms
bsl_windows = verify_window_format(window=bsl_window, num_stim=NUM_STIM)
Expand Down Expand Up @@ -591,6 +621,11 @@ def compute_event_interspike_intervals(self, time_ms: float = 200):
None.
"""

if self._save_params:
parameters = {"compute_event_interspike_interval": dict(time_ms=time_ms)}
jsonify_parameters(parameters, self._file_path)

bins = np.linspace(0, time_ms / 1000, num=int(time_ms + 1))
final_isi = {}
raw_data = {}
Expand Down Expand Up @@ -662,7 +697,10 @@ def trial_correlation(
"""

assert dataset == "psth", "z-score is wip please only use psth for now"
if self._save_params:
parameters = {"trial_correlation": dict(time_bin_ms=time_bin_ms, dataset=dataset)}
jsonify_parameters(parameters, self._file_path)

try:
import pandas as pd
except ImportError:
Expand All @@ -678,7 +716,7 @@ def trial_correlation(

elif dataset == "z_scores":
try:
z_scores = self.z_scores
z_scores = self.raw_zscores
data = z_scores
bins = self.z_bins
except AttributeError:
Expand Down Expand Up @@ -800,6 +838,12 @@ def _generate_sample_z_parameter(self) -> dict:

return example_z_parameter

def save_z_sample_parameters(self, z_parameters: dict):
    """Persist a z-score parameter dictionary to ``z_parameters.json``.

    The file is written into the current working directory so that
    ``get_responsive_neurons`` can later reload it when called without
    explicit parameters.

    Parameters
    ----------
    z_parameters: dict
        Dictionary of z-score response parameters to persist.
    """
    # json is already imported at module level; the previous local
    # `import json` was redundant.
    with open("z_parameters.json", "w") as write_file:
        json.dump(z_parameters, write_file)

def get_responsive_neurons(self, z_parameters: Optional[dict] = None):
"""
function for assessing only responsive neurons based on z scored parameters.
Expand Down Expand Up @@ -833,7 +877,7 @@ def get_responsive_neurons(self, z_parameters: Optional[dict] = None):
or dict of response properties in same format "
)

if len(parameter_file) > 0:
if z_parameters is None:
with open("z_parameters.json") as read_file:
z_parameters = json.load(read_file)
else:
Expand Down
11 changes: 8 additions & 3 deletions src/spikeanalysis/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import json
from typing import Union
import numpy as np
from pathlib import Path


class NumpyEncoder(json.JSONEncoder):
Expand All @@ -11,17 +12,21 @@ def default(self, obj):
return json.JSONEncoder.default(self, obj)


def jsonify_parameters(parameters: dict):
def jsonify_parameters(parameters: dict, file_path: Path | None = None):
    """Merge *parameters* into ``analysis_parameters.json`` and write it back.

    Existing entries are kept and updated; if no parameter file exists yet,
    one is created containing only *parameters*.

    Parameters
    ----------
    parameters: dict
        Mapping of analysis-function name -> keyword arguments used.
    file_path: Path | None, default: None
        Directory in which to read/write ``analysis_parameters.json``.
        If None, the current working directory is used.

    Raises
    ------
    FileNotFoundError
        If *file_path* is given but does not exist on disk.
    """
    if file_path is None:
        file_path = Path("")
    elif not file_path.exists():
        # Explicit error instead of `assert`, which is stripped under -O.
        raise FileNotFoundError(f"file_path does not exist: {file_path}")

    param_file = file_path / "analysis_parameters.json"
    try:
        with open(param_file, "r") as read_file:
            new_parameters = json.load(read_file)
        new_parameters.update(parameters)
    except FileNotFoundError:
        # First call: no parameter file yet, start fresh.
        new_parameters = parameters

    with open(param_file, "w") as write_file:
        json.dump(new_parameters, write_file)


Expand Down

0 comments on commit 5a8bb4b

Please sign in to comment.