iblphotometry integration #749

Open · wants to merge 6 commits into iblrigv8dev
8 changes: 5 additions & 3 deletions iblrig/test/test_transfers.py
@@ -145,19 +145,21 @@ def create_fake_data(self):
raw_photometry_df.to_csv(folder_neurophotometrics / 'raw_photometry.csv', index=False)

def test_copier(self):
session = _create_behavior_session(ntrials=50, kwargs=self.session_kwargs)
# session = _create_behavior_session(ntrials=50, kwargs=self.session_kwargs)
self.create_fake_data()

# workaround to find the settings.yaml
with mock.patch('iblrig.path_helper._load_settings_yaml') as mocker:
mocker.side_effect = self.side_effect
# the actual code to test
iblrig.neurophotometrics.init_neurophotometrics_subject(
session_stub=session.paths['SESSION_FOLDER'],
session_stub=f'test_subject/{datetime.today().strftime("%Y-%m-%d")}/001',
rois=['Region00', 'Region01'],
locations=['VTA', 'SNc'],
)
iblrig.neurophotometrics.copy_photometry_subject(session.paths['SESSION_FOLDER'])
# iblrig.neurophotometrics.copy_photometry_subject(session.paths['SESSION_FOLDER'])
(sc,) = iblrig.commands.transfer_data(tag='neurophotometrics')
self.assertEqual(sc.state, 2)


class TestIntegrationTransferExperiments(TestIntegrationTransferExperimentsBase):
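For context, the copier flow that this test exercises boils down to the following minimal sketch, built only from the calls visible in the diff above; the module imports and the settings side effect are assumptions for illustration, not part of this change.

```python
# A minimal sketch of the copier flow under test, assuming the call signatures shown
# in the diff above; the settings side effect and module imports are illustrative.
from datetime import datetime
from unittest import mock

import iblrig.commands
import iblrig.neurophotometrics


def run_copier_flow(settings_side_effect):
    # the session stub is now built from today's date rather than from a freshly
    # created behavior session (see the commented-out _create_behavior_session call)
    session_stub = f'test_subject/{datetime.today().strftime("%Y-%m-%d")}/001'

    # patch the settings loader so the fake rig settings are picked up
    with mock.patch('iblrig.path_helper._load_settings_yaml') as mocker:
        mocker.side_effect = settings_side_effect
        # declare the fibers for this subject so the experiment description lists them
        iblrig.neurophotometrics.init_neurophotometrics_subject(
            session_stub=session_stub,
            rois=['Region00', 'Region01'],
            locations=['VTA', 'SNc'],
        )
        # run the transfer tagged for neurophotometrics; the test asserts sc.state == 2
        (sc,) = iblrig.commands.transfer_data(tag='neurophotometrics')
    return sc
```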
43 changes: 15 additions & 28 deletions iblrig/transfer_experiments.py
@@ -12,10 +12,9 @@
from pathlib import Path

import numpy as np
import pandas as pd
import pandera

import ibllib.pipes.misc
import iblphotometry.io as fpio
import iblrig
import one.alf.path as alfiles
from ibllib.io import raw_data_loaders, session_params
@@ -679,11 +678,17 @@ def neurophotometrics_description(
description['fibers'] = {roi: {'location': location} for roi, location in zip(rois, locations, strict=False)}
return {'neurophotometrics': description}

def _copy_collections(self, folder_neurophotometric: Path) -> bool:
def _copy_collections(self, folder_neurophotometric: Path | None = None) -> bool:
ed = self.experiment_description['neurophotometrics']
dt = datetime.datetime.fromisoformat(ed['datetime'])
# Here we find the first photometry folder after the start_time. If this fails,
# we can feed a custom start_time to reach the desired folder, or simply rename the folder
# FIXME TODO
folder_neurophotometric = (
self.session_path.parents[4].joinpath('neurophotometrics')
if folder_neurophotometric is None
else folder_neurophotometric
)
folder_day = next(folder_neurophotometric.glob(ed['datetime'][:10]), None)
assert folder_day is not None, f"Neurophotometrics folder {folder_neurophotometric} doesn't contain data"
folder_times = list(folder_day.glob('T*'))
@@ -694,32 +699,14 @@ def _copy_collections(self, folder_neurophotometric: Path) -> bool:
csv_digital_inputs = folder_day.joinpath(f'T{hhmmss[i]}', 'digital_inputs.csv')
assert csv_raw_photometry.exists(), f'Raw photometry file {csv_raw_photometry} not found'
assert csv_digital_inputs.exists(), f'Digital inputs file {csv_digital_inputs} not found'

# Copy the raw and digital inputs files to the server
# TODO move this into a data loader ? Especially the schemas will apply to both the csv and parquet format
df_raw_photometry = pd.read_csv(csv_raw_photometry)
df_digital_inputs = pd.read_csv(csv_digital_inputs, header=None)
df_digital_inputs.columns = ['ChannelName', 'Channel', 'AlwaysTrue', 'SystemTimestamp', 'ComputerTimestamp']
# this will ensure the columns are present, and that there was no magic new format on a new Bonsai version
schema_raw_data = pandera.DataFrameSchema(
columns=dict(
FrameCounter=pandera.Column(pandera.Int64),
SystemTimestamp=pandera.Column(pandera.Float64),
LedState=pandera.Column(pandera.Int16, coerce=True),
ComputerTimestamp=pandera.Column(pandera.Float64),
**{k: pandera.Column(pandera.Float64) for k in ed['fibers']},
)
)
schema_digital_inputs = pandera.DataFrameSchema(
columns=dict(
ChannelName=pandera.Column(str, coerce=True),
Channel=pandera.Column(pandera.Int8, coerce=True),
AlwaysTrue=pandera.Column(bool, coerce=True),
SystemTimestamp=pandera.Column(pandera.Float64),
ComputerTimestamp=pandera.Column(pandera.Float64),
)
)
df_raw_photometry = schema_raw_data.validate(df_raw_photometry)
df_digital_inputs = schema_digital_inputs.validate(df_digital_inputs)
# read in the raw photometry data without validation first, then validate it
df_raw_photometry = fpio.from_raw_neurophotometrics_file_to_raw_df(csv_raw_photometry, validate=False)
# explicitly against the fibers declared in the experiment description file
cols = ed['fibers'].keys()
df_raw_photometry = fpio.validate_neurophotometrics_df(df_raw_photometry, data_columns=cols)
df_digital_inputs = fpio.read_digital_inputs_csv(csv_digital_inputs, validate=True)
remote_photometry_path = self.remote_session_path.joinpath(ed['collection'])
remote_photometry_path.mkdir(parents=True, exist_ok=True)
df_raw_photometry.to_parquet(remote_photometry_path.joinpath('_neurophotometrics_fpData.raw.pqt'))
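In effect, this hunk swaps the inline pandas reads and hand-rolled pandera schemas for loaders from iblphotometry.io. A minimal sketch of the new load-and-validate path, built only from the fpio calls shown above; the folder layout and fiber column names are illustrative assumptions.

```python
# A minimal sketch of the new load-and-validate path through iblphotometry.io (fpio),
# using only the fpio calls that appear in this diff. The folder layout mirrors the
# globs in _copy_collections; the fiber column names are illustrative.
from pathlib import Path

import iblphotometry.io as fpio


def load_and_validate(folder_time: Path, fiber_columns=('Region00', 'Region01')):
    csv_raw_photometry = folder_time / 'raw_photometry.csv'
    csv_digital_inputs = folder_time / 'digital_inputs.csv'

    # read the raw Bonsai output first without validation ...
    df_raw = fpio.from_raw_neurophotometrics_file_to_raw_df(csv_raw_photometry, validate=False)
    # ... then validate explicitly against the fiber columns from the experiment description
    df_raw = fpio.validate_neurophotometrics_df(df_raw, data_columns=fiber_columns)
    # the digital inputs loader validates on read
    df_digital = fpio.read_digital_inputs_csv(csv_digital_inputs, validate=True)
    return df_raw, df_digital
```

Keeping the schemas inside iblphotometry means the same validation can cover both the csv and parquet representations, which is what the TODO removed in this hunk was asking for.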
25 changes: 21 additions & 4 deletions pdm.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -28,6 +28,7 @@ dependencies = [
"iblqt>=0.3.1",
"ONE-api>=2.11.1",
"tycmd-wrapper>=0.2.1",
"iblphotometry @ git+https://github.com/int-brain-lab/ibl-photometry@main",
Contributor:
please package for pypi - we try to avoid adding dependencies from github

#
# Everything else
"annotated-types>=0.7.0",