diff --git a/.github/workflows/PR-test.yml b/.github/workflows/PR-test.yml
new file mode 100644
index 000000000..a11fa567a
--- /dev/null
+++ b/.github/workflows/PR-test.yml
@@ -0,0 +1,33 @@
+name: PR Test
+
+on: push
+
+jobs:
+ run-tests:
+ runs-on: ${{ matrix.os }}
+ strategy:
+ matrix:
+ os: [ubuntu-latest] #, macos-latest, windows-latest]
+ env:
+ OS: ${{ matrix.os }}
+ PYTHON: '3.8'
+ steps:
+ - name: Cancel Workflow Action
+ uses: styfle/cancel-workflow-action@0.6.0
+ with:
+ access_token: ${{ github.token }}
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Set up Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.8
+ - name: Set up conda environment
+ uses: conda-incubator/setup-miniconda@v2
+ with:
+ activate-environment: rec_to_nwb
+ environment-file: environment.yml
+ - name: Install rec_to_nwb
+ shell: bash -l {0}
+ run: |
+ pip install -e .
diff --git a/rec_to_nwb/.gitignore b/.gitignore
similarity index 68%
rename from rec_to_nwb/.gitignore
rename to .gitignore
index 70867d141..286461ed3 100644
--- a/rec_to_nwb/.gitignore
+++ b/.gitignore
@@ -20,6 +20,8 @@ parts/
sdist/
var/
wheels/
+pip-wheel-metadata/
+share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
@@ -38,6 +40,7 @@ pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
+.nox/
.coverage
.coverage.*
.cache
@@ -72,9 +75,20 @@ target/
# Jupyter Notebook
.ipynb_checkpoints
+# IPython
+profile_default/
+ipython_config.py
+
# pyenv
.python-version
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
# celery beat schedule file
celerybeat-schedule
@@ -102,9 +116,18 @@ venv.bak/
# mypy
.mypy_cache/
-
-# idea pycharm
-../.idea/
-
-#testexample
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+.DS_Store
+*.mat
+*.lock
+*.dirlock
+.vscode
+*.dat
+*.xml
+*.h264
+*.mda
*.nwb
diff --git a/README.md b/README.md
index 6d9e5ab3c..bf745c33f 100644
--- a/README.md
+++ b/README.md
@@ -1,28 +1,12 @@
# rec_to_nwb
+[![PR Test](https://github.com/LorenFrankLab/rec_to_nwb/actions/workflows/PR-test.yml/badge.svg)](https://github.com/LorenFrankLab/rec_to_nwb/actions/workflows/PR-test.yml)
# About
rec_to_nwb is a python conda package for converting SpikeGadgets rec files to NWB files.
It converts experiment data from the `/raw` or `/preprocessing` folder to a `.nwb` file. It utilizes the rec_to_binaries package for the preprocessing phase.
-# Prerequisites
-## For users
-1. Install Spike Gadgets
-
-2. Add SpikeGadgets to path.
- If Spike Gadgets is in default location:
- ```bash
- export PATH="$HOME/SpikeGadgets/:$PATH"
- ```
-3. Download miniconda from
-
-4. Install rec_to_nwb package:
- ```bash
- conda install -c conda-forge -c novelakrk rec_to_nwb
- ```
-5. Install Jupyter notebook
- ```bash
- pip install jupyter notebook
- ```
+# Instructions:
+Currently we suggest following the installation instructions at https://github.com/LorenFrankLab/franklabnwb, as that repository includes additional helpful files, but you can also install this package directly using the instructions below.
## For developers
1. Install Spike Gadgets
@@ -32,21 +16,21 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
```bash
export PATH="$HOME/SpikeGadgets/:$PATH"
```
-3. Download miniconda from
-
+3. Install Anaconda or Miniconda if you haven't already.
+
4. clone repository
```bash
- git clone https://github.com/NovelaNeuro/rec_to_nwb.git
+ git clone https://github.com/LorenFrankLab/rec_to_nwb.git
- cd rec_to_nwb/rec_to_nwb
+ cd rec_to_nwb
```
5. Create conda environment.
```bash
conda env create -f environment.yml
```
-6. jupyter notebook installation
+6. Install rec_to_nwb
```bash
- pip install jupyter notebook
+ pip install -e .
```
7. Documentation can be viewed at
@@ -63,27 +47,31 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
jupyter notebook nwb_generation.ipynb
```
4. Metadata.yml description:
- ```
- # general information about the experiment
- experimenter name: Alison Comrie
+
+   Important note: right now the code assumes that the electrode groups listed below (each of which corresponds to one or more NTrodes in the file) are in ascending order by NTrode number. If this is not the case, the data could be scrambled. Thus, the first listed electrode group should correspond to, for example, NTrode 1 (or perhaps NTrodes 1-4), while the second would correspond to NTrode 2 (or 5-8), etc.
+
+
+ ```yaml
+ # general information about the experiment
+ experimenter_name: Alison Comrie
lab: Loren Frank
institution: University of California, San Francisco
- experiment description: Reinforcement learning
- session description: Reinforcement leaarning
+ experiment_description: Reinforcement learning
+  session_description: Reinforcement learning
session_id: beans_01
subject:
description: Long Evans Rat
genotype: Wild Type
sex: Male
species: Rat
- subject id: Beans
+ subject_id: Beans
weight: Unknown
#Units of analog and behavioral_events
units:
analog: 'unspecified'
behavioral_events: 'unspecified'
- #data acq device used in experiment
- data acq device:
+ #data_acq_device used in experiment
+ data_acq_device:
- name: acq_0
system: sample_system
amplifier: sample_amplifier
@@ -129,19 +117,18 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
associated_video_files:
- name: 20190718_beans_01_s1.1.h264
camera_id : 0
- # Times period multiplier is used in pos/mda invalid/valid times, to multiply the period when detecting gaps,
- to avoid creating invalid times in case of only small deviations. (optional parameter, default 1.5)
+ # Times period multiplier is used in pos/mda invalid/valid times, to multiply the period when detecting gaps, to avoid creating invalid times in case of only small deviations. (optional parameter, default 1.5)
times_period_multiplier: 1.5
# Din/Dout events which filter out files from DIO data in data directory. Each name has to be unique. Stored in behavioral_events section in output nwb file.
- behavioral_events:
+ behavioral_events:
- name: Poke2
description: Din2
# Device name. Stored in output nwb file.
- device:
+ device:
name:
- Trodes
- # Electrode Groups list used in experiment. Each Id has to be unique, device_type has to refer to existing device_type in probe.yml. Target_x,y,z fields describe the specified location where this group should be. Possible value of units: 'um' or 'mm'
- electrode groups:
+ # Electrode Groups list used in experiment. Each Id has to be unique, device_type has to refer to existing device_type in probe.yml. Target_x,y,z fields describe the specified location where this group should be. Possible value of units: 'um' or 'mm'
+ electrode_groups:
- id: 0
location: mPFC
device_type: 128c-4s8mm6cm-20um-40um-sl
@@ -163,8 +150,8 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
# Ntrodes list which refer 1:1 to elements from xml header existing in rec binary file.
# ntrode_id has to match to SpikeNTrode id, electrode_group_id refers to electrode group,
# bad_channels is a list of broken channels in the map, where map corresponds to the electrode channels
- - ntrode_id: 1
- electrode_group_id: 0
+ - ntrode_id: 1
+ electrode_group_id: 0
bad_channels: [0,2]
map:
0: 0
@@ -179,9 +166,10 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
1: 5
2: 6
3: 7
- ```
-5. Probe.yml description:
```
+
+5. Probe.yml description:
+ ```yaml
probe_type: tetrode_12.5 # Type of the probe that refers to device_type in electrode_group in metadata.yml
units: 'um' # possible value for unit is um or mm
probe_description: 'four wire electrode'
@@ -212,8 +200,8 @@ It converts experiment data from `/raw` or `/preprocessing` folder to `.nwb` fil
7. Input files `metadata.yml` as well as `probe[1-N].yml` are validated against the rec file headers.
8. We provide two classes to generate the NWB file.
-* `RawToNWBBuilder` - To generate NWB file from raw data.
-* `NWBFileBuilder` - To generate NWB file from preprocessed data.
+ * `RawToNWBBuilder` - To generate NWB file from raw data.
+ * `NWBFileBuilder` - To generate NWB file from preprocessed data.
##### Raw data
Initialize RawToNWBBuilder, which requires `animal_name`, `data_path`, and `dates` that exist in your experiment folder. Next, build the NWB using `build_nwb()`; a short usage sketch follows the argument list below.
@@ -238,9 +226,9 @@ If you don't want mda or pos invalid/valid times in your nwb, set accordingly fl
**dates** = `list of strings` names of folders that contain experiment data
**nwb_metadata** = `MetadataManager` object with metadata.yml and probes.yml
-
+
**output_path** = `string` path specifying location and name of result file (default 'output.nwb')
-
+
**video_path** = `string` path specifying the location to which the .h264 video files are copied
**extract_analog** = `boolean` flag specifying if analog data should be extracted from raw (default True)
@@ -254,22 +242,22 @@ If you don't want mda or pos invalid/valid times in your nwb, set accordingly fl
**extract_mda** = `boolean` flag specifying if mda data should be extracted from raw (default True)
**parallel_instances** = `int` number of threads, optimal value highly depends on hardware (default 4)
-
+
**overwrite** = `boolean` If true, will overwrite existing files. (default True)
-
+
**trodes_rec_export_args** = `tuple of strings` path to a rec header file that overrides all headers existing in the rec binary files, e.g. `_DEFAULT_TRODES_REC_EXPORT_ARGS = ('-reconfig', str(path) + '/test/processing/res/reconfig_header.xml')`
build_nwb arguments:
**process_mda_valid_time** = `boolean` True if the mda valid times should be built and appended to the nwb.
Requires the mda data inside the nwb. (default True)
-
+
**process_mda_invalid_time** = `boolean` True if the mda invalid times should be built and appended to the nwb.
Requires the mda data inside the nwb. (default True)
-
+
**process_pos_valid_time** = `boolean` True if the pos valid times should be built and appended to the nwb.
Requires the pos data inside the nwb. (default True)
-
+
**process_pos_invalid_time** = `boolean` True if the pos invalid times should be built and appended to the nwb.
Requires the pos data inside the nwb. (default True)
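+
+A minimal usage sketch (the animal name, date, and file paths are placeholders for your own experiment layout; `MetadataManager` is assumed to take the `metadata.yml` path plus a list of probe yml paths):
+```python
+from rec_to_nwb.processing.builder.raw_to_nwb_builder import RawToNWBBuilder
+from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
+
+# metadata.yml and probe1.yml describe the experiment and the probe hardware
+metadata = MetadataManager(
+    'test_data/beans/metadata.yml',
+    ['test_data/beans/probe1.yml'])
+
+builder = RawToNWBBuilder(
+    animal_name='beans',
+    data_path='test_data/',
+    dates=['20190718'],
+    nwb_metadata=metadata)
+builder.build_nwb()  # preprocess the raw data and write the .nwb file
+```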
@@ -294,38 +282,38 @@ After that, you can add mda or pos invalid/valid data to your NWB, using 'build_
NWBFileBuilder arguments
**data_path** = `string` path to directory containing all experiments data
-
+
**animal_name** = `string` directory name which represents animal subject of experiment
-
+
**date** = `string` date of experiment
-
+
**nwb_metadata** = `MetadataManager` object contains metadata about experiment
-
+
**process_dio** = `boolean` flag if dio data should be processed
-
+
**process_mda** = `boolean` flag if mda data should be processed
-
+
**process_analog** = `boolean` flag if analog data should be processed
-
+
**video_path** = `string` path specifying the location to which the .h264 video files are copied
-
+
**output_file** = `string` path and name specifying where the .nwb file will be written
build_and_append_to_nwb arguments:
**process_mda_valid_time** = `boolean` True if the mda valid times should be built and appended to the nwb.
Requires the mda data inside the nwb. (default True)
-
+
**process_mda_invalid_time** = `boolean` True if the mda invalid times should be built and appended to the nwb.
Requires the mda data inside the nwb. (default True)
-
+
**process_pos_valid_time** = `boolean` True if the pos valid times should be built and appended to the nwb.
Requires the pos data inside the nwb. (default True)
-
+
**process_pos_invalid_time** = `boolean` True if the pos invalid times should be built and appended to the nwb.
Requires the pos data inside the nwb. (default True)
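+
+A minimal usage sketch (date, paths, and output name are placeholders; `metadata` is a `MetadataManager` as in the raw-data sketch above):
+```python
+from rec_to_nwb.processing.builder.nwb_file_builder import NWBFileBuilder
+
+builder = NWBFileBuilder(
+    data_path='test_data/',
+    animal_name='beans',
+    date='20190718',
+    nwb_metadata=metadata,
+    output_file='beans20190718.nwb')
+content = builder.build()  # assemble NWB content from preprocessed data
+builder.write(content)     # write the content to output_file
+builder.build_and_append_to_nwb(process_mda_invalid_time=False)
+```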
-9. Make sure that the data structure in given directory (in that case `test_data`) looks similar to following example:
+9. Make sure that the file and directory structure (in this case `test_data`) looks similar to the following example. Please NOTE that rec_to_nwb will fail if the data is not in this format:
```bash
--test_data
|
@@ -560,4 +548,3 @@ After that, you can add mda or pos invalid/valid data to your NWB, using 'build_
|-- README.md
```
When processing completes, a `.nwb` file is created in the output_path directory.
-
diff --git a/environment.yml b/environment.yml
new file mode 100644
index 000000000..0417ff473
--- /dev/null
+++ b/environment.yml
@@ -0,0 +1,60 @@
+name: rec_to_nwb
+channels:
+ - conda-forge
+ - defaults
+ - franklab
+ - novelakrk
+dependencies:
+ - python>=3.6,<3.10
+ - rec_to_binaries
+ - pip
+ - hdmf>=3.1.1,<4
+ - pynwb>=2.0.0,<3
+ - asn1crypto
+ - blas
+ - ca-certificates
+ - certifi
+ - cffi
+ - chardet
+ - cryptography
+ - elementpath
+ - h5py<4
+ - hdf5
+ - idna
+ - libblas
+ - libcblas
+ - liblapack
+ - mkl
+ - mkl-service
+ - numpy
+ - openssl
+ - pandas
+ - pycparser
+ - pyopenssl
+ - pysocks
+ - python-dateutil
+ - pytz
+ - requests
+ - scipy
+ - setuptools
+ - six
+ - sqlite
+ - urllib3
+ - wheel
+ - xmlschema
+ - zlib
+ - vdom
+ - pyyaml
+ - pytest
+ - testfixtures
+ - jupyterlab
+ - nb_conda
+ - mountainlab_pytools
+ - xmldiff
+ - pip:
+ - git+https://github.com/LorenFrankLab/ndx-franklab-novela.git
+
+# Docs
+ - recommonmark
+ - sphinx-autoapi
+ - sphinx_rtd_theme
diff --git a/rec_to_nwb/__init__.py b/rec_to_nwb/__init__.py
index 256fa3794..b5850111f 100644
--- a/rec_to_nwb/__init__.py
+++ b/rec_to_nwb/__init__.py
@@ -1,4 +1,3 @@
-#__init__.py
from rec_to_nwb.processing.builder.nwb_file_builder import NWBFileBuilder
from rec_to_nwb.processing.builder.raw_to_nwb_builder import RawToNWBBuilder
from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
diff --git a/rec_to_nwb/logging.conf b/rec_to_nwb/logging.conf
index 49bc73b6c..90ea35dbc 100644
--- a/rec_to_nwb/logging.conf
+++ b/rec_to_nwb/logging.conf
@@ -8,7 +8,7 @@ keys=fileHandler
keys=fileFormatter
[logger_root]
-level = INFO
+level=INFO
handlers=fileHandler
[handler_fileHandler]
diff --git a/rec_to_nwb/processing/builder/__init__.py b/rec_to_nwb/processing/builder/__init__.py
index e69de29bb..ef1336565 100644
--- a/rec_to_nwb/processing/builder/__init__.py
+++ b/rec_to_nwb/processing/builder/__init__.py
@@ -0,0 +1,2 @@
+from rec_to_nwb.processing.builder.raw_to_nwb_builder import RawToNWBBuilder
+from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
diff --git a/rec_to_nwb/processing/builder/nwb_file_builder.py b/rec_to_nwb/processing/builder/nwb_file_builder.py
index bd2e599e3..2e85174e2 100644
--- a/rec_to_nwb/processing/builder/nwb_file_builder.py
+++ b/rec_to_nwb/processing/builder/nwb_file_builder.py
@@ -1,6 +1,7 @@
import logging.config
import os
import uuid
+from copy import deepcopy
from datetime import datetime
import pytz
@@ -8,31 +9,63 @@
from pynwb.file import Subject
from rec_to_nwb.processing.builder.originators.analog_originator import AnalogOriginator
-from rec_to_nwb.processing.builder.originators.associated_files_originator import AssociatedFilesOriginator
-from rec_to_nwb.processing.builder.originators.camera_device_originator import CameraDeviceOriginator
-from rec_to_nwb.processing.builder.originators.camera_sample_frame_counts_originator import \
- CameraSampleFrameCountsOriginator
-from rec_to_nwb.processing.builder.originators.data_acq_device_originator import DataAcqDeviceOriginator
+from rec_to_nwb.processing.builder.originators.associated_files_originator import (
+ AssociatedFilesOriginator,
+)
+from rec_to_nwb.processing.builder.originators.camera_device_originator import (
+ CameraDeviceOriginator,
+)
+from rec_to_nwb.processing.builder.originators.camera_sample_frame_counts_originator import (
+ CameraSampleFrameCountsOriginator,
+)
+from rec_to_nwb.processing.builder.originators.data_acq_device_originator import (
+ DataAcqDeviceOriginator,
+)
from rec_to_nwb.processing.builder.originators.dio_originator import DioOriginator
-from rec_to_nwb.processing.builder.originators.electrode_group_originator import ElectrodeGroupOriginator
-from rec_to_nwb.processing.builder.originators.electrodes_extension_originator import ElectrodesExtensionOriginator
-from rec_to_nwb.processing.builder.originators.electrodes_originator import ElectrodesOriginator
+from rec_to_nwb.processing.builder.originators.electrode_group_originator import (
+ ElectrodeGroupOriginator,
+)
+from rec_to_nwb.processing.builder.originators.electrodes_extension_originator import (
+ ElectrodesExtensionOriginator,
+)
+from rec_to_nwb.processing.builder.originators.electrodes_originator import (
+ ElectrodesOriginator,
+)
from rec_to_nwb.processing.builder.originators.epochs_originator import EpochsOriginator
-from rec_to_nwb.processing.builder.originators.header_device_originator import HeaderDeviceOriginator
-from rec_to_nwb.processing.builder.originators.mda_invalid_time_originator import MdaInvalidTimeOriginator
+from rec_to_nwb.processing.builder.originators.header_device_originator import (
+ HeaderDeviceOriginator,
+)
+from rec_to_nwb.processing.builder.originators.mda_invalid_time_originator import (
+ MdaInvalidTimeOriginator,
+)
from rec_to_nwb.processing.builder.originators.mda_originator import MdaOriginator
-from rec_to_nwb.processing.builder.originators.mda_valid_time_originator import MdaValidTimeOriginator
-from rec_to_nwb.processing.builder.originators.pos_invalid_originator import PosInvalidTimeOriginator
-from rec_to_nwb.processing.builder.originators.pos_valid_time_originator import PosValidTimeOriginator
-from rec_to_nwb.processing.builder.originators.position_originator import PositionOriginator
+from rec_to_nwb.processing.builder.originators.mda_valid_time_originator import (
+ MdaValidTimeOriginator,
+)
+from rec_to_nwb.processing.builder.originators.pos_invalid_originator import (
+ PosInvalidTimeOriginator,
+)
+from rec_to_nwb.processing.builder.originators.pos_valid_time_originator import (
+ PosValidTimeOriginator,
+)
+from rec_to_nwb.processing.builder.originators.position_originator import (
+ PositionOriginator,
+)
from rec_to_nwb.processing.builder.originators.probe_originator import ProbeOriginator
-from rec_to_nwb.processing.builder.originators.processing_module_originator import ProcessingModuleOriginator
-from rec_to_nwb.processing.builder.originators.sample_count_timestamp_corespondence_originator import \
- SampleCountTimestampCorespondenceOriginator
-from rec_to_nwb.processing.builder.originators.shanks_electrodes_originator import ShanksElectrodeOriginator
+from rec_to_nwb.processing.builder.originators.processing_module_originator import (
+ ProcessingModuleOriginator,
+)
+from rec_to_nwb.processing.builder.originators.sample_count_timestamp_corespondence_originator import (
+ SampleCountTimestampCorespondenceOriginator,
+)
+from rec_to_nwb.processing.builder.originators.shanks_electrodes_originator import (
+ ShanksElectrodeOriginator,
+)
from rec_to_nwb.processing.builder.originators.shanks_originator import ShanksOriginator
from rec_to_nwb.processing.builder.originators.task_originator import TaskOriginator
-from rec_to_nwb.processing.builder.originators.video_files_originator import VideoFilesOriginator
+from rec_to_nwb.processing.builder.originators.video_files_originator import (
+ VideoFilesOriginator,
+)
from rec_to_nwb.processing.header.header_checker.header_processor import HeaderProcessor
from rec_to_nwb.processing.header.header_checker.rec_file_finder import RecFileFinder
from rec_to_nwb.processing.header.module.header import Header
@@ -41,21 +74,38 @@
from rec_to_nwb.processing.nwb.common.session_time_extractor import SessionTimeExtractor
from rec_to_nwb.processing.nwb.components.device.device_factory import DeviceFactory
from rec_to_nwb.processing.nwb.components.device.device_injector import DeviceInjector
-from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import FlProbeManager
+from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import (
+ FlProbeManager,
+)
from rec_to_nwb.processing.tools.beartype.beartype import beartype
from rec_to_nwb.processing.tools.data_scanner import DataScanner
-from rec_to_nwb.processing.validation.associated_files_validator import AssociatedFilesExistanceValidator
-from rec_to_nwb.processing.validation.metadata_section_validator import MetadataSectionValidator
+from rec_to_nwb.processing.validation.associated_files_validator import (
+ AssociatedFilesExistanceValidator,
+)
+from rec_to_nwb.processing.validation.metadata_section_validator import (
+ MetadataSectionValidator,
+)
from rec_to_nwb.processing.validation.ntrode_validator import NTrodeValidator
from rec_to_nwb.processing.validation.path_validator import PathValidator
-from rec_to_nwb.processing.validation.preprocessing_validator import PreprocessingValidator
+from rec_to_nwb.processing.validation.preprocessing_validator import (
+ PreprocessingValidator,
+)
from rec_to_nwb.processing.validation.task_validator import TaskValidator
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
+from rec_to_nwb.processing.validation.validation_registrator import (
+ ValidationRegistrator,
+)
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, "logging.conf"),
+ disable_existing_loggers=False,
+)
logger = logging.getLogger(__name__)
+# switches for old dataset timestamp processing (hard-coded for now)
+_CONVERT_OLD_TIMESTAMPS = True # False
+_RETURN_OLD_TIMESTAMPS = True # False
+
class NWBFileBuilder:
"""Unpack data from preprocessing folder specified by arguments, and write those data into NWB file format
@@ -79,31 +129,49 @@ class NWBFileBuilder:
@beartype
def __init__(
- self,
- data_path: str,
- animal_name: str,
- date: str,
- nwb_metadata: MetadataManager,
- process_dio: bool = True,
- process_mda: bool = True,
- process_analog: bool = True,
- process_pos_timestamps: bool = True,
- video_path: str = '',
- output_file: str = 'output.nwb',
- reconfig_header: str = ''
+ self,
+ data_path: str,
+ animal_name: str,
+ date: str,
+ nwb_metadata: MetadataManager,
+ process_dio: bool = True,
+ process_mda: bool = True,
+ process_analog: bool = True,
+ process_pos_timestamps: bool = True,
+ preprocessing_path: str = "",
+ video_path: str = "",
+ output_file: str = "output.nwb",
+ reconfig_header: str = "",
+ is_old_dataset: bool = False,
+ session_start_time=None,
):
-
- logger.info('NWBFileBuilder initialization')
+ logger.info("NWBFileBuilder initialization")
logger.info(
- 'NWB builder initialization parameters: \n'
- + 'data_path = ' + str(data_path) + '\n'
- + 'animal_name = ' + str(animal_name) + '\n'
- + 'date = ' + str(date) + '\n'
- + 'nwb_metadata = ' + str(nwb_metadata) + '\n'
- + 'process_dio = ' + str(process_dio) + '\n'
- + 'process_mda = ' + str(process_mda) + '\n'
- + 'process_analog = ' + str(process_analog) + '\n'
- + 'output_file = ' + str(output_file) + '\n'
+ "NWB builder initialization parameters: \n"
+ + "data_path = "
+ + str(data_path)
+ + "\n"
+ + "animal_name = "
+ + str(animal_name)
+ + "\n"
+ + "date = "
+ + str(date)
+ + "\n"
+ + "nwb_metadata = "
+ + str(nwb_metadata)
+ + "\n"
+ + "process_dio = "
+ + str(process_dio)
+ + "\n"
+ + "process_mda = "
+ + str(process_mda)
+ + "\n"
+ + "process_analog = "
+ + str(process_analog)
+ + "\n"
+ + "output_file = "
+ + str(output_file)
+ + "\n"
)
validation_registrator = ValidationRegistrator()
@@ -116,57 +184,80 @@ def __init__(
self.metadata = nwb_metadata.metadata
metadata_section_validator = MetadataSectionValidator(self.metadata)
metadata_section_validator.validate_sections()
- if self.metadata.get('associated_files', []):
- associated_files_existance_validator = AssociatedFilesExistanceValidator(self.metadata['associated_files'])
- if associated_files_existance_validator.files_exist():
- pass
- else:
- raise Exception("one or more associated file listed in metadata.yaml file does not exist")
+ if self.metadata.get("associated_files", []):
+ associated_files_existance_validator = AssociatedFilesExistanceValidator(
+ self.metadata["associated_files"]
+ )
+ if not associated_files_existance_validator.files_exist():
+ raise Exception(
+                "one or more associated files listed in the metadata.yml file do not exist"
+ )
self.probes = nwb_metadata.probes
self.process_dio = process_dio
self.process_mda = process_mda
self.process_analog = process_analog
self.process_pos_timestamps = process_pos_timestamps
+ if not preprocessing_path:
+ self.preprocessing_path = data_path
+ else:
+ self.preprocessing_path = preprocessing_path
self.output_file = output_file
self.video_path = video_path
- self.link_to_notes = self.metadata.get('link to notes', None)
- data_types_for_scanning = {'pos': True,
- 'time': True,
- 'mda': process_mda,
- 'DIO': process_dio,
- 'analog': process_analog}
+ self.is_old_dataset = is_old_dataset
+ self.link_to_notes = self.metadata.get("link to notes", None)
+ data_types_for_scanning = {
+ "pos": True,
+ "time": True,
+ "mda": process_mda,
+ "DIO": process_dio,
+ "analog": process_analog,
+ }
rec_files_list = RecFileFinder().find_rec_files(
- path=(self.data_path
- + '/' + self.animal_name
- + '/raw/'
- + self.date)
+ path=os.path.join(self.data_path, self.animal_name, "raw", self.date)
)
- header_file = HeaderProcessor.process_headers(rec_files_list)
+ if not preprocessing_path:
+ header_path = None # default
+ else:
+ header_path = os.path.join(
+ self.preprocessing_path, self.animal_name, "headers", self.date
+ )
+ os.makedirs(header_path, exist_ok=True)
+ header_file = HeaderProcessor.process_headers(
+ rec_files_list, copy_dir=header_path
+ )
if reconfig_header:
self.header = Header(reconfig_header)
else:
self.header = Header(header_file)
- self.data_scanner = DataScanner(data_path, animal_name, nwb_metadata)
+ self.data_scanner = DataScanner(
+ self.preprocessing_path, animal_name, nwb_metadata
+ )
self.dataset_names = self.data_scanner.get_all_epochs(date)
- full_data_path = data_path + '/' + animal_name + '/preprocessing/' + date
+ full_data_path = os.path.join(
+ self.preprocessing_path, self.animal_name, "preprocessing", date
+ )
validation_registrator = ValidationRegistrator()
- validation_registrator.register(NTrodeValidator(self.metadata, self.header, self.probes))
- validation_registrator.register(PreprocessingValidator(
- full_data_path,
- self.dataset_names,
- data_types_for_scanning
- ))
- validation_registrator.register(TaskValidator(self.metadata['tasks']))
+ validation_registrator.register(
+ NTrodeValidator(self.metadata, self.header, self.probes)
+ )
+ validation_registrator.register(
+ PreprocessingValidator(
+ full_data_path, self.dataset_names, data_types_for_scanning
+ )
+ )
+ validation_registrator.register(TaskValidator(self.metadata["tasks"]))
validation_registrator.validate()
self.__extract_datasets(animal_name, date)
self.corrupted_data_manager = CorruptedDataManager(self.metadata)
- self.shanks_electrode_originator = ShanksElectrodeOriginator(self.probes, self.metadata)
+ self.shanks_electrode_originator = ShanksElectrodeOriginator(
+ self.probes, self.metadata
+ )
self.shanks_originator = ShanksOriginator(self.probes, self.metadata)
self.fl_probe_manager = FlProbeManager(self.probes)
@@ -176,65 +267,122 @@ def __init__(
self.electrode_group_originator = ElectrodeGroupOriginator(self.metadata)
self.electrodes_originator = ElectrodesOriginator(self.probes, self.metadata)
- self.session_time_extractor = SessionTimeExtractor(
- self.datasets,
- self.animal_name,
- self.date,
- self.dataset_names
- )
+ if self.is_old_dataset:
+ if not session_start_time:
+ raise ValueError("session_start_time is required for old dataset.")
+ self.session_start_time = session_start_time
+ else:
+ session_time_extractor = SessionTimeExtractor(
+ self.datasets, self.animal_name, self.date, self.dataset_names
+ )
+ self.session_start_time = session_time_extractor.get_session_start_time()
- self.mda_valid_time_originator = MdaValidTimeOriginator(self.header, self.metadata)
- self.mda_invalid_time_originator = MdaInvalidTimeOriginator(self.header, self.metadata)
+ self.mda_valid_time_originator = MdaValidTimeOriginator(
+ self.header, self.metadata
+ )
+ self.mda_invalid_time_originator = MdaInvalidTimeOriginator(
+ self.header, self.metadata
+ )
self.pos_valid_time_originator = PosValidTimeOriginator(self.metadata)
self.pos_invalid_time_originator = PosInvalidTimeOriginator(self.metadata)
self.epochs_originator = EpochsOriginator(self.datasets)
- if 'associated_files' in self.metadata:
+ if "associated_files" in self.metadata:
self.associated_files_originator = AssociatedFilesOriginator(self.metadata)
self.electrodes_extension_originator = ElectrodesExtensionOriginator(
- self.probes,
- self.metadata,
- self.header
+ self.probes, self.metadata, self.header
)
- self.sample_count_timestamp_corespondence_originator =\
+ self.sample_count_timestamp_corespondence_originator = (
SampleCountTimestampCorespondenceOriginator(self.datasets)
+ )
self.processing_module_originator = ProcessingModuleOriginator()
self.task_originator = TaskOriginator(self.metadata)
self.camera_device_originator = CameraDeviceOriginator(self.metadata)
- self.header_device_originator = HeaderDeviceOriginator(self.header, self.metadata)
- self.probes_originator = ProbeOriginator(self.device_factory, self.device_injector, self.probes)
+ self.header_device_originator = HeaderDeviceOriginator(
+ self.header, self.metadata
+ )
+ self.probes_originator = ProbeOriginator(
+ self.device_factory, self.device_injector, self.probes
+ )
self.camera_sample_frame_counts_originator = CameraSampleFrameCountsOriginator(
- self.data_path + "/" + animal_name + "/raw/" + self.date + "/")
- self.video_files_originator = VideoFilesOriginator(
- self.data_path + "/" + animal_name + "/raw/" + self.date + "/",
- self.video_path,
- self.metadata["associated_video_files"],
+ os.path.join(self.data_path, self.animal_name, "raw", self.date)
)
+ if self.is_old_dataset:
+ self.video_files_originator = VideoFilesOriginator(
+ os.path.join(self.data_path, self.animal_name, "raw", self.date),
+ self.video_path,
+ self.metadata["associated_video_files"],
+ convert_timestamps=_CONVERT_OLD_TIMESTAMPS,
+ return_timestamps=_RETURN_OLD_TIMESTAMPS,
+ )
+ else:
+ self.video_files_originator = VideoFilesOriginator(
+ os.path.join(self.data_path, self.animal_name, "raw", self.date),
+ self.video_path,
+ self.metadata["associated_video_files"],
+ )
self.data_acq_device_originator = DataAcqDeviceOriginator(
device_factory=self.device_factory,
device_injector=self.device_injector,
- metadata=self.metadata['data acq device']
+ metadata=self.metadata["data_acq_device"],
)
if self.process_mda:
- self.mda_originator = MdaOriginator(self.datasets, self.header, self.metadata)
+ self.mda_originator = MdaOriginator(
+ self.datasets, self.header, self.metadata
+ )
if self.process_dio:
- self.dio_originator = DioOriginator(self.metadata, self.datasets)
+ if self.is_old_dataset:
+ self.dio_originator = DioOriginator(
+ self.metadata,
+ self.datasets,
+ convert_timestamps=_CONVERT_OLD_TIMESTAMPS,
+ )
+ else:
+ self.dio_originator = DioOriginator(self.metadata, self.datasets)
if self.process_analog:
- self.analog_originator = AnalogOriginator(self.datasets, self.metadata)
+ if self.is_old_dataset:
+ self.analog_originator = AnalogOriginator(
+ self.datasets,
+ self.metadata,
+ convert_timestamps=_CONVERT_OLD_TIMESTAMPS,
+ return_timestamps=_RETURN_OLD_TIMESTAMPS,
+ )
+ else:
+ self.analog_originator = AnalogOriginator(self.datasets, self.metadata)
+
+ ptp_enabled = self._detect_ptp_from_header()
- self.position_originator = PositionOriginator(self.datasets, self.metadata,
- self.dataset_names, self.process_pos_timestamps)
+ self.position_originator = PositionOriginator(
+ self.datasets, self.metadata, self.dataset_names, ptp_enabled
+ )
+
+ def _detect_ptp_from_header(self):
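+        # Scan the workspace header's ModuleConfiguration for a cameraModule
+        # launched with the '-ptpEnabled' argument; the result tells the
+        # PositionOriginator whether precision time protocol (PTP) timestamps
+        # are available.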
+ mconf = self.header.tree.find("ModuleConfiguration")
+ ptp_enabled = False
+ for smconf in mconf.findall("SingleModuleConfiguration"):
+ if smconf.get("moduleName") in ["cameraModule", "./cameraModule"]:
+ for arg in smconf.findall("Argument"):
+ ptp_enabled = "-ptpEnabled" in arg.attrib.values()
+ if ptp_enabled:
+ break
+ if ptp_enabled:
+ break
+ logger.info("PTP enabled: " + str(ptp_enabled))
+ return ptp_enabled
def __extract_datasets(self, animal_name, date):
self.data_scanner.extract_data_from_date_folder(date)
- self.datasets = [self.data_scanner.data[animal_name][date][dataset] for dataset in self.dataset_names]
+ self.datasets = [
+ self.data_scanner.data[animal_name][date][dataset]
+ for dataset in self.dataset_names
+ ]
def build(self):
"""Build NWBFile
@@ -243,31 +391,47 @@ def build(self):
NWBFile: Return NWBFile content
"""
- logger.info('Building components for NWB')
+ logger.info("Building components for NWB")
+
+ # Convert date of birth to datetime object
+ subject_metadata = deepcopy(self.metadata["subject"])
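+        # Two date_of_birth formats are accepted: ISO 8601 with fractional
+        # seconds (e.g. 2020-01-01T00:00:00.000Z) and plain YYYYMMDD.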
+ try:
+ subject_metadata.update(
+ {
+ "date_of_birth": datetime.strptime(
+ subject_metadata["date_of_birth"], "%Y-%m-%dT%H:%M:%S.%fZ"
+ )
+ }
+ )
+ except ValueError:
+ subject_metadata.update(
+ {
+ "date_of_birth": datetime.strptime(
+ subject_metadata["date_of_birth"], "%Y%m%d"
+ )
+ }
+ )
+
+ # Convert weight to string and add units
+ subject_metadata.update({"weight": f"{subject_metadata['weight']} g"})
+
nwb_content = NWBFile(
- session_description=self.metadata['session description'],
- experimenter=self.metadata['experimenter name'],
- lab=self.metadata['lab'],
- institution=self.metadata['institution'],
- session_start_time=self.session_time_extractor.get_session_start_time(),
+ session_description=self.metadata["session_description"],
+ experimenter=self.metadata["experimenter_name"],
+ lab=self.metadata["lab"],
+ institution=self.metadata["institution"],
+ session_start_time=self.session_start_time,
timestamps_reference_time=datetime.fromtimestamp(0, pytz.utc),
identifier=str(uuid.uuid1()),
- session_id=self.metadata['session_id'],
+ session_id=self.metadata["session_id"],
notes=self.link_to_notes,
- experiment_description=self.metadata['experiment description'],
- subject=Subject(
- description=self.metadata['subject']['description'],
- genotype=self.metadata['subject']['genotype'],
- sex=self.metadata['subject']['sex'],
- species=self.metadata['subject']['species'],
- subject_id=self.metadata['subject']['subject id'],
- weight=str(self.metadata['subject']['weight']),
- ),
+ experiment_description=self.metadata["experiment_description"],
+ subject=Subject(**subject_metadata),
)
self.processing_module_originator.make(nwb_content)
- if 'associated_files' in self.metadata:
+ if "associated_files" in self.metadata:
self.associated_files_originator.make(nwb_content)
self.position_originator.make(nwb_content)
@@ -278,7 +442,9 @@ def build(self):
shanks_dict = self.shanks_originator.make(shanks_electrodes_dict)
- probes = self.probes_originator.make(nwb_content, shanks_dict, valid_map_dict['probes'])
+ probes = self.probes_originator.make(
+ nwb_content, shanks_dict, valid_map_dict["probes"]
+ )
self.data_acq_device_originator.make(nwb_content)
@@ -289,14 +455,19 @@ def build(self):
self.video_files_originator.make(nwb_content)
electrode_groups = self.electrode_group_originator.make(
- nwb_content, probes, valid_map_dict['electrode_groups']
+ nwb_content, probes, valid_map_dict["electrode_groups"]
)
self.electrodes_originator.make(
- nwb_content, electrode_groups, valid_map_dict['electrodes'], valid_map_dict['electrode_groups']
+ nwb_content,
+ electrode_groups,
+ valid_map_dict["electrodes"],
+ valid_map_dict["electrode_groups"],
)
- self.electrodes_extension_originator.make(nwb_content, valid_map_dict['electrodes'])
+ self.electrodes_extension_originator.make(
+ nwb_content, valid_map_dict["electrodes"]
+ )
self.epochs_originator.make(nwb_content)
@@ -320,20 +491,25 @@ def build(self):
def write(self, content):
"""Write nwb file handler with colected data into actual file"""
- logger.info('Writing down content to ' + self.output_file)
- with NWBHDF5IO(path=self.output_file, mode='w') as nwb_fileIO:
+ logger.info("Writing down content to " + self.output_file)
+ with NWBHDF5IO(path=self.output_file, mode="w") as nwb_fileIO:
nwb_fileIO.write(content)
nwb_fileIO.close()
- logger.info(self.output_file + ' file has been created.')
+ logger.info(self.output_file + " file has been created.")
return self.output_file
def __build_corrupted_data_manager(self):
- logger.info('CorruptedData: Checking')
+ logger.info("CorruptedData: Checking")
return self.corrupted_data_manager.get_valid_map_dict()
- def build_and_append_to_nwb(self, process_mda_valid_time=True, process_mda_invalid_time=True,
- process_pos_valid_time=True, process_pos_invalid_time=True):
+ def build_and_append_to_nwb(
+ self,
+ process_mda_valid_time=True,
+ process_mda_invalid_time=True,
+ process_pos_valid_time=True,
+ process_pos_invalid_time=True,
+ ):
"""Create and append to existing nwb. Set flag to add it to nwb
Args:
@@ -353,7 +529,7 @@ def build_and_append_to_nwb(self, process_mda_valid_time=True, process_mda_inval
NWBFile: Return NWBFile content
"""
- with NWBHDF5IO(path=self.output_file, mode='a') as nwb_file_io:
+ with NWBHDF5IO(path=self.output_file, mode="a") as nwb_file_io:
nwb_content = nwb_file_io.read()
if self.process_pos_timestamps:
@@ -367,5 +543,4 @@ def build_and_append_to_nwb(self, process_mda_valid_time=True, process_mda_inval
if process_mda_invalid_time:
self.mda_invalid_time_originator.make(nwb_content)
-
nwb_file_io.write(nwb_content)
diff --git a/rec_to_nwb/processing/builder/old_nwb_file_builder.py b/rec_to_nwb/processing/builder/old_nwb_file_builder.py
deleted file mode 100644
index 8a3f2ce9a..000000000
--- a/rec_to_nwb/processing/builder/old_nwb_file_builder.py
+++ /dev/null
@@ -1,367 +0,0 @@
-import logging.config
-import os
-import uuid
-from datetime import datetime
-
-import pytz
-from pynwb import NWBHDF5IO, NWBFile
-from pynwb.file import Subject
-
-from rec_to_nwb.processing.builder.originators.associated_files_originator import AssociatedFilesOriginator
-from rec_to_nwb.processing.builder.originators.camera_device_originator import CameraDeviceOriginator
-from rec_to_nwb.processing.builder.originators.camera_sample_frame_counts_originator import \
- CameraSampleFrameCountsOriginator
-from rec_to_nwb.processing.builder.originators.data_acq_device_originator import DataAcqDeviceOriginator
-from rec_to_nwb.processing.builder.originators.electrode_group_originator import ElectrodeGroupOriginator
-from rec_to_nwb.processing.builder.originators.electrodes_extension_originator import ElectrodesExtensionOriginator
-from rec_to_nwb.processing.builder.originators.electrodes_originator import ElectrodesOriginator
-from rec_to_nwb.processing.builder.originators.epochs_originator import EpochsOriginator
-from rec_to_nwb.processing.builder.originators.header_device_originator import HeaderDeviceOriginator
-from rec_to_nwb.processing.builder.originators.mda_invalid_time_originator import MdaInvalidTimeOriginator
-from rec_to_nwb.processing.builder.originators.mda_valid_time_originator import MdaValidTimeOriginator
-from rec_to_nwb.processing.builder.originators.old_analog_originator import OldAnalogOriginator
-from rec_to_nwb.processing.builder.originators.old_dio_originator import OldDioOriginator
-from rec_to_nwb.processing.builder.originators.old_mda_originator import OldMdaOriginator
-from rec_to_nwb.processing.builder.originators.old_position_originator import OldPositionOriginator
-from rec_to_nwb.processing.builder.originators.old_video_files_originator import OldVideoFilesOriginator
-from rec_to_nwb.processing.builder.originators.pos_invalid_originator import PosInvalidTimeOriginator
-from rec_to_nwb.processing.builder.originators.pos_valid_time_originator import PosValidTimeOriginator
-from rec_to_nwb.processing.builder.originators.probe_originator import ProbeOriginator
-from rec_to_nwb.processing.builder.originators.processing_module_originator import ProcessingModuleOriginator
-from rec_to_nwb.processing.builder.originators.sample_count_timestamp_corespondence_originator import \
- SampleCountTimestampCorespondenceOriginator
-from rec_to_nwb.processing.builder.originators.shanks_electrodes_originator import ShanksElectrodeOriginator
-from rec_to_nwb.processing.builder.originators.shanks_originator import ShanksOriginator
-from rec_to_nwb.processing.builder.originators.task_originator import TaskOriginator
-from rec_to_nwb.processing.header.header_checker.header_processor import HeaderProcessor
-from rec_to_nwb.processing.header.header_checker.rec_file_finder import RecFileFinder
-from rec_to_nwb.processing.header.module.header import Header
-from rec_to_nwb.processing.metadata.corrupted_data_manager import CorruptedDataManager
-from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
-from rec_to_nwb.processing.nwb.components.device.device_factory import DeviceFactory
-from rec_to_nwb.processing.nwb.components.device.device_injector import DeviceInjector
-from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import FlProbeManager
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.data_scanner import DataScanner
-from rec_to_nwb.processing.validation.associated_files_validator import AssociatedFilesExistanceValidator
-from rec_to_nwb.processing.validation.metadata_section_validator import MetadataSectionValidator
-from rec_to_nwb.processing.validation.ntrode_validator import NTrodeValidator
-from rec_to_nwb.processing.validation.path_validator import PathValidator
-from rec_to_nwb.processing.validation.preprocessing_validator import PreprocessingValidator
-from rec_to_nwb.processing.validation.task_validator import TaskValidator
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-
-class OldNWBFileBuilder:
- """Unpack data from preprocessing folder specified by arguments, and write those data into NWB file format
-
- Args:
- data_path (string): path to directory containing all experiments data
- animal_name (string): directory name which represents animal subject of experiment
- date (string): date of experiment
- nwb_metadata (MetadataManager): object contains metadata about experiment
- process_dio (boolean): flag if dio data should be processed
- process_mda (boolean): flag if mda data should be processed
- process_analog (boolean): flag if analog data should be processed
- video_path (string): path to directory with video files associated to nwb file
- output_file (string): path and name specifying where .nwb file gonna be written
-
- Methods:
- build()
- write()
- build_and_append_to_nwb()
- """
-
- @beartype
- def __init__(
- self,
- data_path: str,
- animal_name: str,
- date: str,
- session_start_time,
- nwb_metadata: MetadataManager,
- process_dio: bool = True,
- process_mda: bool = True,
- process_analog: bool = True,
- process_pos_timestamps: bool = True,
- video_path: str = '',
- output_file: str = 'output.nwb',
- reconfig_header: str = ''
- ):
-
- logger.info('NWBFileBuilder initialization')
- logger.info(
- 'NWB builder initialization parameters: \n'
- + 'data_path = ' + str(data_path) + '\n'
- + 'animal_name = ' + str(animal_name) + '\n'
- + 'date = ' + str(date) + '\n'
- + 'nwb_metadata = ' + str(nwb_metadata) + '\n'
- + 'process_dio = ' + str(process_dio) + '\n'
- + 'process_mda = ' + str(process_mda) + '\n'
- + 'process_analog = ' + str(process_analog) + '\n'
- + 'output_file = ' + str(output_file) + '\n'
- )
-
- validation_registrator = ValidationRegistrator()
- validation_registrator.register(PathValidator(data_path))
- validation_registrator.validate()
-
- self.animal_name = animal_name
- self.date = date
- self.data_path = data_path
- self.metadata = nwb_metadata.metadata
- metadata_section_validator = MetadataSectionValidator(self.metadata)
- metadata_section_validator.validate_sections()
- if self.metadata.get('associated_files', []):
- associated_files_existance_validator = AssociatedFilesExistanceValidator(self.metadata['associated_files'])
- if associated_files_existance_validator.files_exist():
- pass
- else:
- raise Exception("one or more associated file listed in metadata.yaml file does not exist")
- self.probes = nwb_metadata.probes
- self.process_dio = process_dio
- self.process_mda = process_mda
- self.process_analog = process_analog
- self.process_pos_timestamps = process_pos_timestamps
- self.output_file = output_file
- self.video_path = video_path
- self.link_to_notes = self.metadata.get('link to notes', None)
- data_types_for_scanning = {'pos': True,
- 'time': True,
- 'mda': process_mda,
- 'DIO': process_dio,
- 'analog': process_analog}
-
- rec_files_list = RecFileFinder().find_rec_files(
- path=(self.data_path
- + '/' + self.animal_name
- + '/raw/'
- + self.date)
- )
-
- header_file = HeaderProcessor.process_headers(rec_files_list)
- if reconfig_header:
- self.header = Header(reconfig_header)
- else:
- self.header = Header(header_file)
- self.data_scanner = DataScanner(data_path, animal_name, nwb_metadata)
- self.dataset_names = self.data_scanner.get_all_epochs(date)
- full_data_path = data_path + '/' + animal_name + '/preprocessing/' + date
-
- validation_registrator = ValidationRegistrator()
- validation_registrator.register(NTrodeValidator(self.metadata, self.header, self.probes))
- validation_registrator.register(PreprocessingValidator(
- full_data_path,
- self.dataset_names,
- data_types_for_scanning
- ))
- validation_registrator.register(TaskValidator(self.metadata['tasks']))
- validation_registrator.validate()
-
- self.__extract_datasets(animal_name, date)
-
- self.corrupted_data_manager = CorruptedDataManager(self.metadata)
-
- self.shanks_electrode_originator = ShanksElectrodeOriginator(self.probes, self.metadata)
- self.shanks_originator = ShanksOriginator(self.probes, self.metadata)
-
- self.fl_probe_manager = FlProbeManager(self.probes)
- self.device_injector = DeviceInjector()
- self.device_factory = DeviceFactory()
-
- self.electrode_group_originator = ElectrodeGroupOriginator(self.metadata)
- self.electrodes_originator = ElectrodesOriginator(self.probes, self.metadata)
-
- self.session_start_time = session_start_time
-
- self.mda_valid_time_originator = MdaValidTimeOriginator(self.header, self.metadata)
- self.mda_invalid_time_originator = MdaInvalidTimeOriginator(self.header, self.metadata)
- self.pos_valid_time_originator = PosValidTimeOriginator(self.metadata)
- self.pos_invalid_time_originator = PosInvalidTimeOriginator(self.metadata)
-
- self.epochs_originator = EpochsOriginator(self.datasets)
-
- if 'associated_files' in self.metadata:
- self.associated_files_originator = AssociatedFilesOriginator(self.metadata)
-
- self.electrodes_extension_originator = ElectrodesExtensionOriginator(
- self.probes,
- self.metadata,
- self.header
- )
-
- self.sample_count_timestamp_corespondence_originator =\
- SampleCountTimestampCorespondenceOriginator(self.datasets)
- self.processing_module_originator = ProcessingModuleOriginator()
- self.task_originator = TaskOriginator(self.metadata)
- self.camera_device_originator = CameraDeviceOriginator(self.metadata)
- self.header_device_originator = HeaderDeviceOriginator(self.header, self.metadata)
- self.probes_originator = ProbeOriginator(self.device_factory, self.device_injector, self.probes)
- self.camera_sample_frame_counts_originator = CameraSampleFrameCountsOriginator(
- self.data_path + "/" + animal_name + "/raw/" + self.date + "/")
- self.old_video_files_originator = OldVideoFilesOriginator(
- self.data_path + "/" + animal_name + "/raw/" + self.date + "/",
- self.video_path,
- self.metadata["associated_video_files"],
- )
-
- self.data_acq_device_originator = DataAcqDeviceOriginator(
- device_factory=self.device_factory,
- device_injector=self.device_injector,
- metadata=self.metadata['data acq device']
- )
-
- if self.process_mda:
- self.old_mda_originator = OldMdaOriginator(self.datasets, self.header, self.metadata)
-
- if self.process_dio:
- self.old_dio_originator = OldDioOriginator(self.metadata, self.datasets)
-
- if self.process_analog:
- self.old_analog_originator = OldAnalogOriginator(self.datasets, self.metadata)
-
- self.old_position_originator = OldPositionOriginator(self.datasets, self.metadata,
- self.dataset_names, self.process_pos_timestamps)
-
- def __extract_datasets(self, animal_name, date):
- self.data_scanner.extract_data_from_date_folder(date)
- self.datasets = [self.data_scanner.data[animal_name][date][dataset] for dataset in self.dataset_names]
-
- def build(self):
- """Build NWBFile
-
- Returns:
- NWBFile: Return NWBFile content
- """
-
- logger.info('Building components for NWB')
- nwb_content = NWBFile(
- session_description=self.metadata['session description'],
- experimenter=self.metadata['experimenter name'],
- lab=self.metadata['lab'],
- institution=self.metadata['institution'],
- session_start_time=self.session_start_time,
- timestamps_reference_time=datetime.fromtimestamp(0, pytz.utc),
- identifier=str(uuid.uuid1()),
- session_id=self.metadata['session_id'],
- notes=self.link_to_notes,
- experiment_description=self.metadata['experiment description'],
- subject=Subject(
- description=self.metadata['subject']['description'],
- genotype=self.metadata['subject']['genotype'],
- sex=self.metadata['subject']['sex'],
- species=self.metadata['subject']['species'],
- subject_id=self.metadata['subject']['subject id'],
- weight=str(self.metadata['subject']['weight']),
- ),
- )
-
- self.processing_module_originator.make(nwb_content)
-
- self.old_video_files_originator.make(nwb_content)
-
- if 'associated_files' in self.metadata:
- self.associated_files_originator.make(nwb_content)
-
- self.old_position_originator.make(nwb_content)
-
- valid_map_dict = self.__build_corrupted_data_manager()
-
- shanks_electrodes_dict = self.shanks_electrode_originator.make()
-
- shanks_dict = self.shanks_originator.make(shanks_electrodes_dict)
-
- probes = self.probes_originator.make(nwb_content, shanks_dict, valid_map_dict['probes'])
-
- self.data_acq_device_originator.make(nwb_content)
-
- self.header_device_originator.make(nwb_content)
-
- self.camera_device_originator.make(nwb_content)
-
- electrode_groups = self.electrode_group_originator.make(
- nwb_content, probes, valid_map_dict['electrode_groups']
- )
-
- self.electrodes_originator.make(
- nwb_content, electrode_groups, valid_map_dict['electrodes'], valid_map_dict['electrode_groups']
- )
-
- self.electrodes_extension_originator.make(nwb_content, valid_map_dict['electrodes'])
-
- self.epochs_originator.make(nwb_content)
-
- self.sample_count_timestamp_corespondence_originator.make(nwb_content)
-
- self.task_originator.make(nwb_content)
-
- self.camera_sample_frame_counts_originator.make(nwb_content)
-
- if self.process_dio:
- self.old_dio_originator.make(nwb_content)
-
- if self.process_analog:
- self.old_analog_originator.make(nwb_content)
-
- if self.process_mda:
- self.old_mda_originator.make(nwb_content)
-
- return nwb_content
-
- def write(self, content):
- """Write nwb file handler with colected data into actual file"""
-
- logger.info('Writing down content to ' + self.output_file)
- with NWBHDF5IO(path=self.output_file, mode='w') as nwb_fileIO:
- nwb_fileIO.write(content)
- nwb_fileIO.close()
-
- logger.info(self.output_file + ' file has been created.')
- return self.output_file
-
- def __build_corrupted_data_manager(self):
- logger.info('CorruptedData: Checking')
- return self.corrupted_data_manager.get_valid_map_dict()
-
- def build_and_append_to_nwb(self, process_mda_valid_time=True, process_mda_invalid_time=True,
- process_pos_valid_time=True, process_pos_invalid_time=True):
- """Create and append to existing nwb. Set flag to add it to nwb
-
- Args:
- process_mda_valid_time (boolean): True if the mda valid times should be build and append to nwb.
- Need the mda data inside the nwb. (default True)
- process_mda_invalid_time (boolean): True if the mda invalid times should be build and append to nwb.
- Need the mda data inside the nwb. (default True)
- process_pos_valid_time (boolean): True if the pos valid times should be build and append to nwb.
- Need the pos data inside the nwb. (default True)
- process_pos_invalid_time (boolean): True if the pos invalid times should be build and append to nwb.
- Need the pos data inside the nwb. (default True)
-
- Raises:
- ElementExistException: If element already exist in NWB
-
- Returns:
- NWBFile: Return NWBFile content
- """
-
- with NWBHDF5IO(path=self.output_file, mode='a') as nwb_file_io:
- nwb_content = nwb_file_io.read()
-
- if self.process_pos_timestamps:
- if process_pos_valid_time:
- self.pos_valid_time_originator.make(nwb_content)
- if process_pos_invalid_time:
- self.pos_invalid_time_originator.make(nwb_content)
-
- if process_mda_valid_time:
- self.mda_valid_time_originator.make(nwb_content)
- if process_mda_invalid_time:
- self.mda_invalid_time_originator.make(nwb_content)
-
-
- nwb_file_io.write(nwb_content)
diff --git a/rec_to_nwb/processing/builder/old_originators/__init__.py b/rec_to_nwb/processing/builder/old_originators/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/rec_to_nwb/processing/builder/old_raw_to_nwb_builder.py b/rec_to_nwb/processing/builder/old_raw_to_nwb_builder.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/rec_to_nwb/processing/builder/originators/analog_originator.py b/rec_to_nwb/processing/builder/originators/analog_originator.py
index 6b1d7002a..4642309bf 100644
--- a/rec_to_nwb/processing/builder/originators/analog_originator.py
+++ b/rec_to_nwb/processing/builder/originators/analog_originator.py
@@ -1,26 +1,42 @@
-from rec_to_nwb.processing.nwb.components.analog.analog_creator import AnalogCreator
-from rec_to_nwb.processing.nwb.components.analog.analog_files import AnalogFiles
-from rec_to_nwb.processing.nwb.components.analog.analog_injector import AnalogInjector
-from rec_to_nwb.processing.nwb.components.analog.fl_analog_manager import FlAnalogManager
+from rec_to_nwb.processing.nwb.components.analog.analog_creator import \
+ AnalogCreator
+from rec_to_nwb.processing.nwb.components.analog.analog_files import \
+ AnalogFiles
+from rec_to_nwb.processing.nwb.components.analog.analog_injector import \
+ AnalogInjector
+from rec_to_nwb.processing.nwb.components.analog.fl_analog_manager import \
+ FlAnalogManager
class AnalogOriginator:
- def __init__(self, datasets, metadata):
+ def __init__(self, datasets, metadata,
+ convert_timestamps: bool = True,
+ return_timestamps: bool = True,
+ ):
self.datasets = datasets
self.metadata = metadata
self.continuous_time_files = self.__get_continuous_time_files()
+ # timestamp processing switches (optionally turn off for old dataset)
+ self.convert_timestamps = convert_timestamps
+ self.return_timestamps = return_timestamps
+
def make(self, nwb_content):
- analog_directories = [single_dataset.get_data_path_from_dataset('analog') for single_dataset in self.datasets]
+ analog_directories = [single_dataset.get_data_path_from_dataset(
+ 'analog') for single_dataset in self.datasets]
analog_files = AnalogFiles(analog_directories)
analog_manager = FlAnalogManager(
analog_files=analog_files.get_files(),
- continuous_time_files=self.continuous_time_files
+ continuous_time_files=self.continuous_time_files,
+ convert_timestamps=self.convert_timestamps,
+ return_timestamps=self.return_timestamps,
)
fl_analog = analog_manager.get_analog()
analog_injector = AnalogInjector(nwb_content)
- analog_injector.inject(AnalogCreator.create(fl_analog, self.metadata['units']['analog']), 'analog')
+ analog_injector.inject(AnalogCreator.create(
+ fl_analog, self.metadata['units']['analog']), 'analog')
def __get_continuous_time_files(self):
- return [single_dataset.get_continuous_time() for single_dataset in self.datasets]
+ return [single_dataset.get_continuous_time()
+ for single_dataset in self.datasets]
diff --git a/rec_to_nwb/processing/builder/originators/associated_files_originator.py b/rec_to_nwb/processing/builder/originators/associated_files_originator.py
index ea56e3299..9f0177689 100644
--- a/rec_to_nwb/processing/builder/originators/associated_files_originator.py
+++ b/rec_to_nwb/processing/builder/originators/associated_files_originator.py
@@ -1,14 +1,22 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.associated_files.associated_files_creator import AssociatedFilesCreator
-from rec_to_nwb.processing.nwb.components.associated_files.associated_files_injector import AssociatedFilesInjector
-from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_manager import FlAssociatedFilesManager
-from rec_to_nwb.processing.validation.associated_files_validation import AssociatedFilesValidator
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
+from rec_to_nwb.processing.nwb.components.associated_files.associated_files_creator import \
+ AssociatedFilesCreator
+from rec_to_nwb.processing.nwb.components.associated_files.associated_files_injector import \
+ AssociatedFilesInjector
+from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_manager import \
+ FlAssociatedFilesManager
+from rec_to_nwb.processing.validation.associated_files_validation import \
+ AssociatedFilesValidator
+from rec_to_nwb.processing.validation.validation_registrator import \
+ ValidationRegistrator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -17,7 +25,8 @@ class AssociatedFilesOriginator:
def __init__(self, metadata):
if 'associated_files' in metadata:
validation_registrator = ValidationRegistrator()
- validation_registrator.register(AssociatedFilesValidator(metadata['associated_files']))
+ validation_registrator.register(
+ AssociatedFilesValidator(metadata['associated_files']))
validation_registrator.validate()
self.fl_associated_files_manager = FlAssociatedFilesManager(
metadata['associated_files']
@@ -34,5 +43,5 @@ def make(self, nwb_content):
for fl_associated_file in fl_associated_files
]
logger.info('AssociatedFiles: Injecting')
- self.associated_files_injector.inject(associated_files, 'associated_files', nwb_content)
- logger.info("Files stored inside nwb: " + str(associated_files))
+ self.associated_files_injector.inject(
+ associated_files, 'associated_files', nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/camera_device_originator.py b/rec_to_nwb/processing/builder/originators/camera_device_originator.py
index 2e0260b09..b7b2f45e5 100644
--- a/rec_to_nwb/processing/builder/originators/camera_device_originator.py
+++ b/rec_to_nwb/processing/builder/originators/camera_device_originator.py
@@ -1,12 +1,18 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device_manager import FlCameraDeviceManager
-from rec_to_nwb.processing.nwb.components.device.device_injector import DeviceInjector
-from rec_to_nwb.processing.nwb.components.device.device_factory import DeviceFactory
+from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device_manager import \
+ FlCameraDeviceManager
+from rec_to_nwb.processing.nwb.components.device.device_factory import \
+ DeviceFactory
+from rec_to_nwb.processing.nwb.components.device.device_injector import \
+ DeviceInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
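
The recurring `logging.config.fileConfig` change replaces string concatenation with `os.path.join` plus `os.pardir`. A small sketch of what the new expression resolves to (the example path is illustrative):

```python
import os

# Same construction as in the diff: three directory levels up from this module.
path = os.path.dirname(os.path.abspath(__file__))
fname = os.path.join(path, os.pardir, os.pardir, os.pardir, 'logging.conf')

# For .../rec_to_nwb/processing/builder/originators/camera_device_originator.py
# this normalizes to .../rec_to_nwb/logging.conf, matching the old
# str(path) + '/../../../logging.conf' in a platform-portable way.
print(os.path.normpath(fname))
```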
diff --git a/rec_to_nwb/processing/builder/originators/camera_sample_frame_counts_originator.py b/rec_to_nwb/processing/builder/originators/camera_sample_frame_counts_originator.py
index c0a93f3a8..8fc1156d1 100644
--- a/rec_to_nwb/processing/builder/originators/camera_sample_frame_counts_originator.py
+++ b/rec_to_nwb/processing/builder/originators/camera_sample_frame_counts_originator.py
@@ -1,5 +1,7 @@
-import os
+"""Gets the video frame counts and timestamps for all epochs, inserts them
+into an NWB timeseries object, and puts it in an NWB File"""
import logging.config
+import os
from rec_to_nwb.processing.nwb.components.video_files.camera_sample_frame_counts.camera_sample_frame_counts_injector import \
CameraSampleFrameCountsInjector
@@ -7,7 +9,10 @@
CameraSampleFrameCountsManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -16,6 +21,10 @@ def __init__(self, raw_data_path):
self.raw_data_path = raw_data_path
def make(self, nwb_content):
+ """Gets the video frame counts and timestamps for all epochs, inserts them
+ into an NWB timeseries object, and puts it in an NWB File under
+ the processing module `camera_sample_frame_counts`
+ """
logger.info('Camera Sample Frame Counts Builder: Building')
manager = CameraSampleFrameCountsManager(
raw_data_path=self.raw_data_path
diff --git a/rec_to_nwb/processing/builder/originators/data_acq_device_originator.py b/rec_to_nwb/processing/builder/originators/data_acq_device_originator.py
index 661f1ee38..ee3405bc0 100644
--- a/rec_to_nwb/processing/builder/originators/data_acq_device_originator.py
+++ b/rec_to_nwb/processing/builder/originators/data_acq_device_originator.py
@@ -1,10 +1,14 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device_manager import FlDataAcqDeviceManager
+from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device_manager import \
+ FlDataAcqDeviceManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
diff --git a/rec_to_nwb/processing/builder/originators/dio_originator.py b/rec_to_nwb/processing/builder/originators/dio_originator.py
index 2e4d79faf..098e56fd7 100644
--- a/rec_to_nwb/processing/builder/originators/dio_originator.py
+++ b/rec_to_nwb/processing/builder/originators/dio_originator.py
@@ -7,26 +7,33 @@
from rec_to_nwb.processing.nwb.components.dio.dio_manager import DioManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class DioOriginator:
- def __init__(self, metadata, datasets):
+ def __init__(self, metadata, datasets, convert_timestamps=True):
self.metadata = metadata
self.datasets = datasets
+ self.convert_timestamps = convert_timestamps
def make(self, nwb_content):
logger.info('DIO: Prepare directories')
- dio_directories = [single_dataset.get_data_path_from_dataset('DIO') for single_dataset in self.datasets]
+ dio_directories = [single_dataset.get_data_path_from_dataset(
+ 'DIO') for single_dataset in self.datasets]
logger.info('DIO: Prepare files')
- dio_files = DioFiles(dio_directories, self.metadata['behavioral_events'])
+ dio_files = DioFiles(
+ dio_directories, self.metadata['behavioral_events'])
logger.info('DIO: Retrieve data')
dio_manager = DioManager(
dio_files=dio_files.get_files(),
dio_metadata=self.metadata['behavioral_events'],
- continuous_time_files=self.__get_continuous_time_files()
+ continuous_time_files=self.__get_continuous_time_files(),
+ convert_timestamps=self.convert_timestamps
)
dio_data = dio_manager.get_dio()
logger.info('DIO: Building')
@@ -41,4 +48,5 @@ def make(self, nwb_content):
dio_injector.inject(behavioral_events, 'behavior')
def __get_continuous_time_files(self):
- return [single_dataset.get_continuous_time() for single_dataset in self.datasets]
\ No newline at end of file
+ return [single_dataset.get_continuous_time()
+ for single_dataset in self.datasets]
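
A minimal sketch of the matching change here (a hypothetical call; `metadata`, `datasets`, and `nwb_content` are assumed to be in scope as in the builder):

```python
from rec_to_nwb.processing.builder.originators.dio_originator import DioOriginator

# Keep raw Trodes timestamps for a legacy dataset (default is True).
dio_originator = DioOriginator(
    metadata,                  # assumed: dict with ['behavioral_events'] entries
    datasets,                  # assumed: list of preprocessed datasets
    convert_timestamps=False,
)
dio_originator.make(nwb_content)  # assumed: a pynwb.NWBFile under construction
```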
diff --git a/rec_to_nwb/processing/builder/originators/electrode_group_originator.py b/rec_to_nwb/processing/builder/originators/electrode_group_originator.py
index 091913c33..a53646a3e 100644
--- a/rec_to_nwb/processing/builder/originators/electrode_group_originator.py
+++ b/rec_to_nwb/processing/builder/originators/electrode_group_originator.py
@@ -1,20 +1,26 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.electrode_group.electrode_group_factory import ElectrodeGroupFactory
-from rec_to_nwb.processing.nwb.components.electrode_group.electrode_group_injector import ElectrodeGroupInjector
+from rec_to_nwb.processing.nwb.components.electrode_group.electrode_group_factory import \
+ ElectrodeGroupFactory
+from rec_to_nwb.processing.nwb.components.electrode_group.electrode_group_injector import \
+ ElectrodeGroupInjector
from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group_manager import \
FlNwbElectrodeGroupManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class ElectrodeGroupOriginator:
def __init__(self, metadata):
- self.fl_nwb_electrode_group_manager = FlNwbElectrodeGroupManager(metadata['electrode groups'])
+ self.fl_nwb_electrode_group_manager = FlNwbElectrodeGroupManager(
+ metadata['electrode_groups'])
self.electrode_group_creator = ElectrodeGroupFactory()
self.electrode_group_injector = ElectrodeGroupInjector()
@@ -26,9 +32,11 @@ def make(self, nwb_content, probes, electrode_groups_valid_map):
)
logger.info('ElectrodeGroups: Creating')
nwb_electrode_groups = [
- self.electrode_group_creator.create_nwb_electrode_group(nwb_electrode_group)
+ self.electrode_group_creator.create_nwb_electrode_group(
+ nwb_electrode_group)
for nwb_electrode_group in fl_nwb_electrode_groups
]
logger.info('ElectrodeGroups: Injecting into NWB')
- self.electrode_group_injector.inject_all_electrode_groups(nwb_content, nwb_electrode_groups)
+ self.electrode_group_injector.inject_all_electrode_groups(
+ nwb_content, nwb_electrode_groups)
return nwb_electrode_groups
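
Note the metadata key change from `'electrode groups'` to `'electrode_groups'`. A hedged sketch of the shape this code expects; the per-group field names below are illustrative only, since the actual schema lives in the metadata YAML and is not part of this diff:

```python
# Illustrative only: exact per-group fields depend on the metadata schema.
metadata = {
    'electrode_groups': [  # underscore form now expected by the originators
        {'id': 0, 'location': 'CA1', 'device_type': 'tetrode_12.5',
         'description': 'tetrode drive'},
    ],
}
```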
diff --git a/rec_to_nwb/processing/builder/originators/electrodes_extension_originator.py b/rec_to_nwb/processing/builder/originators/electrodes_extension_originator.py
index 06a7af71b..0a08095c0 100644
--- a/rec_to_nwb/processing/builder/originators/electrodes_extension_originator.py
+++ b/rec_to_nwb/processing/builder/originators/electrodes_extension_originator.py
@@ -7,7 +7,10 @@
FlElectrodeExtensionManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -23,7 +26,8 @@ def __init__(self, probes, metadata, header):
def make(self, nwb_content, electrodes_valid_map):
logger.info('FlElectrodesExtensions: Building')
- fl_electrode_extension = self.fl_electrode_extension_manager.get_fl_electrodes_extension(electrodes_valid_map)
+ fl_electrode_extension = self.fl_electrode_extension_manager.get_fl_electrodes_extension(
+ electrodes_valid_map)
logger.info('FlElectrodesExtensions: Injecting into NWB')
self.electrode_extension_injector.inject_extensions(
nwb_content,
diff --git a/rec_to_nwb/processing/builder/originators/electrodes_originator.py b/rec_to_nwb/processing/builder/originators/electrodes_originator.py
index b3563f5b6..3d5446531 100644
--- a/rec_to_nwb/processing/builder/originators/electrodes_originator.py
+++ b/rec_to_nwb/processing/builder/originators/electrodes_originator.py
@@ -1,21 +1,28 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.electrodes.electrode_creator import ElectrodesCreator
-from rec_to_nwb.processing.nwb.components.electrodes.fl_electrode_manager import FlElectrodeManager
+from rec_to_nwb.processing.nwb.components.electrodes.electrode_creator import \
+ ElectrodesCreator
+from rec_to_nwb.processing.nwb.components.electrodes.fl_electrode_manager import \
+ FlElectrodeManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class ElectrodesOriginator:
def __init__(self, probes, metadata):
- self.fl_electrode_manager = FlElectrodeManager(probes, metadata['electrode groups'])
+ self.fl_electrode_manager = FlElectrodeManager(
+ probes, metadata['electrode_groups'])
self.electrode_creator = ElectrodesCreator()
- def make(self, nwb_content, electrode_groups, electrodes_valid_map, electrode_groups_valid_map):
+ def make(self, nwb_content, electrode_groups, electrodes_valid_map,
+ electrode_groups_valid_map):
logger.info('Electrodes: Building')
fl_electrodes = self.fl_electrode_manager.get_fl_electrodes(
electrode_groups=electrode_groups,
@@ -23,4 +30,5 @@ def make(self, nwb_content, electrode_groups, electrodes_valid_map, electrode_gr
electrode_groups_valid_map=electrode_groups_valid_map
)
logger.info('Electrodes: Creating&Injecting into NWB')
- [self.electrode_creator.create(nwb_content, fl_electrode) for fl_electrode in fl_electrodes]
+ [self.electrode_creator.create(nwb_content, fl_electrode)
+ for fl_electrode in fl_electrodes]
diff --git a/rec_to_nwb/processing/builder/originators/epochs_originator.py b/rec_to_nwb/processing/builder/originators/epochs_originator.py
index cf9037db3..b3595eba8 100644
--- a/rec_to_nwb/processing/builder/originators/epochs_originator.py
+++ b/rec_to_nwb/processing/builder/originators/epochs_originator.py
@@ -1,11 +1,16 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.epochs.epochs_injector import EpochsInjector
-from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_manager import FlEpochsManager
+from rec_to_nwb.processing.nwb.components.epochs.epochs_injector import \
+ EpochsInjector
+from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_manager import \
+ FlEpochsManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -20,4 +25,4 @@ def make(self, nwb_content):
logger.info('Epochs: Creating')
epochs = fl_epochs_manager.get_epochs()
logger.info('Epochs: Injecting')
- EpochsInjector.inject(epochs, nwb_content)
\ No newline at end of file
+ EpochsInjector.inject(epochs, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/header_device_originator.py b/rec_to_nwb/processing/builder/originators/header_device_originator.py
index ab53ead2b..34811e0be 100644
--- a/rec_to_nwb/processing/builder/originators/header_device_originator.py
+++ b/rec_to_nwb/processing/builder/originators/header_device_originator.py
@@ -2,19 +2,26 @@
import os
from rec_to_nwb.processing.header.module.header import Header
-from rec_to_nwb.processing.nwb.components.device.device_factory import DeviceFactory
-from rec_to_nwb.processing.nwb.components.device.device_injector import DeviceInjector
-from rec_to_nwb.processing.nwb.components.device.header.fl_header_device_manager import FlHeaderDeviceManager
+from rec_to_nwb.processing.nwb.components.device.device_factory import \
+ DeviceFactory
+from rec_to_nwb.processing.nwb.components.device.device_injector import \
+ DeviceInjector
+from rec_to_nwb.processing.nwb.components.device.header.fl_header_device_manager import \
+ FlHeaderDeviceManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class HeaderDeviceOriginator:
def __init__(self, header, metadata):
- header_path = str(path) + '/../../../data/' + metadata['default_header_file_path']
+ header_path = str(path) + '/../../../data/' + \
+ metadata['default_header_file_path']
default_header = Header(header_path)
self.fl_header_device_manager = FlHeaderDeviceManager(
'header_device',
@@ -28,6 +35,7 @@ def make(self, nwb_content):
logger.info('HeaderDevice: Building')
fl_header_device = self.fl_header_device_manager.get_fl_header_device()
logger.info('HeaderDevice: Creating')
- header_device = self.device_factory.create_header_device(fl_header_device)
+ header_device = self.device_factory.create_header_device(
+ fl_header_device)
logger.info('HeaderDevice: Injecting into NWB')
self.device_injector.inject_all_devices(nwb_content, [header_device])
diff --git a/rec_to_nwb/processing/builder/originators/mda_invalid_time_originator.py b/rec_to_nwb/processing/builder/originators/mda_invalid_time_originator.py
index 1b0ab33ab..98d422536 100644
--- a/rec_to_nwb/processing/builder/originators/mda_invalid_time_originator.py
+++ b/rec_to_nwb/processing/builder/originators/mda_invalid_time_originator.py
@@ -1,11 +1,16 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.mda.time.invalid.fl_mda_invalid_time_manager import FlMdaInvalidTimeManager
-from rec_to_nwb.processing.nwb.components.mda.time.invalid.mda_invalid_time_injector import MdaInvalidTimeInjector
+from rec_to_nwb.processing.nwb.components.mda.time.invalid.fl_mda_invalid_time_manager import \
+ FlMdaInvalidTimeManager
+from rec_to_nwb.processing.nwb.components.mda.time.invalid.mda_invalid_time_injector import \
+ MdaInvalidTimeInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -13,13 +18,16 @@ class MdaInvalidTimeOriginator:
def __init__(self, header, metadata):
self.fl_mda_invalid_time_manager = FlMdaInvalidTimeManager(
- sampling_rate=float(header.configuration.hardware_configuration.sampling_rate),
+ sampling_rate=float(
+ header.configuration.hardware_configuration.sampling_rate),
metadata=metadata
)
self.mda_invalid_time_injector = MdaInvalidTimeInjector()
def make(self, nwb_content):
logger.info('MDA invalid times: Building')
- mda_invalid_times = self.fl_mda_invalid_time_manager.get_fl_mda_invalid_times(nwb_content)
+ mda_invalid_times = self.fl_mda_invalid_time_manager.get_fl_mda_invalid_times(
+ nwb_content)
logger.info('MDA invalid times: Injecting')
- self.mda_invalid_time_injector.inject_all(mda_invalid_times, nwb_content)
+ self.mda_invalid_time_injector.inject_all(
+ mda_invalid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/mda_originator.py b/rec_to_nwb/processing/builder/originators/mda_originator.py
index 07d911767..c619516e0 100644
--- a/rec_to_nwb/processing/builder/originators/mda_originator.py
+++ b/rec_to_nwb/processing/builder/originators/mda_originator.py
@@ -1,12 +1,17 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.mda.electrical_series_creator import ElectricalSeriesCreator
-from rec_to_nwb.processing.nwb.components.mda.fl_mda_manager import FlMdaManager
+from rec_to_nwb.processing.nwb.components.mda.electrical_series_creator import \
+ ElectricalSeriesCreator
+from rec_to_nwb.processing.nwb.components.mda.fl_mda_manager import \
+ FlMdaManager
from rec_to_nwb.processing.nwb.components.mda.mda_injector import MdaInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -20,7 +25,8 @@ def make(self, nwb_content):
logger.info('MDA: Building')
fl_mda_manager = FlMdaManager(
nwb_content=nwb_content,
- sampling_rate=float(self.header.configuration.hardware_configuration.sampling_rate),
+ sampling_rate=float(
+ self.header.configuration.hardware_configuration.sampling_rate),
datasets=self.datasets,
conversion=self.metadata['raw_data_to_volts']
)
@@ -29,4 +35,4 @@ def make(self, nwb_content):
MdaInjector.inject_mda(
nwb_content=nwb_content,
electrical_series=ElectricalSeriesCreator.create_mda(fl_mda)
- )
\ No newline at end of file
+ )
diff --git a/rec_to_nwb/processing/builder/originators/mda_valid_time_originator.py b/rec_to_nwb/processing/builder/originators/mda_valid_time_originator.py
index 53a17c89a..38ce1fb0c 100644
--- a/rec_to_nwb/processing/builder/originators/mda_valid_time_originator.py
+++ b/rec_to_nwb/processing/builder/originators/mda_valid_time_originator.py
@@ -1,11 +1,16 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.mda.time.valid.fl_mda_valid_time_manager import FlMdaValidTimeManager
-from rec_to_nwb.processing.nwb.components.mda.time.valid.mda_valid_time_injector import MdaValidTimeInjector
+from rec_to_nwb.processing.nwb.components.mda.time.valid.fl_mda_valid_time_manager import \
+ FlMdaValidTimeManager
+from rec_to_nwb.processing.nwb.components.mda.time.valid.mda_valid_time_injector import \
+ MdaValidTimeInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -13,13 +18,15 @@ class MdaValidTimeOriginator:
def __init__(self, header, metadata):
self.fl_mda_valid_time_manager = FlMdaValidTimeManager(
- sampling_rate=float(header.configuration.hardware_configuration.sampling_rate),
+ sampling_rate=float(
+ header.configuration.hardware_configuration.sampling_rate),
metadata=metadata
)
self.mda_valid_time_injector = MdaValidTimeInjector()
def make(self, nwb_content):
logger.info('MDA valid times: Building')
- mda_valid_times = self.fl_mda_valid_time_manager.get_fl_mda_valid_times(nwb_content)
+ mda_valid_times = self.fl_mda_valid_time_manager.get_fl_mda_valid_times(
+ nwb_content)
logger.info('MDA valid times: Injecting')
self.mda_valid_time_injector.inject_all(mda_valid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/old_analog_originator.py b/rec_to_nwb/processing/builder/originators/old_analog_originator.py
deleted file mode 100644
index 2b72f2260..000000000
--- a/rec_to_nwb/processing/builder/originators/old_analog_originator.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from rec_to_nwb.processing.nwb.components.analog.analog_creator import AnalogCreator
-from rec_to_nwb.processing.nwb.components.analog.analog_files import AnalogFiles
-from rec_to_nwb.processing.nwb.components.analog.analog_injector import AnalogInjector
-from rec_to_nwb.processing.nwb.components.analog.fl_analog_manager import FlAnalogManager
-from rec_to_nwb.processing.nwb.components.analog.old_fl_analog_manager import OldFlAnalogManager
-
-
-class OldAnalogOriginator:
-
- def __init__(self, datasets, metadata):
- self.datasets = datasets
- self.metadata = metadata
-
- def make(self, nwb_content):
- analog_directories = [single_dataset.get_data_path_from_dataset('analog') for single_dataset in self.datasets]
- analog_files = AnalogFiles(analog_directories)
- old_analog_manager = OldFlAnalogManager(
- analog_files=analog_files.get_files(),
- )
- fl_analog = old_analog_manager.get_analog()
- old_analog_injector = AnalogInjector(nwb_content)
- old_analog_injector.inject(AnalogCreator.create(fl_analog, self.metadata['units']['analog']), 'analog')
-
- def __get_continuous_time_files(self):
- return [single_dataset.get_continuous_time() for single_dataset in self.datasets]
diff --git a/rec_to_nwb/processing/builder/originators/old_dio_originator.py b/rec_to_nwb/processing/builder/originators/old_dio_originator.py
deleted file mode 100644
index 0db370747..000000000
--- a/rec_to_nwb/processing/builder/originators/old_dio_originator.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import logging.config
-import os
-
-from rec_to_nwb.processing.nwb.components.dio.dio_builder import DioBuilder
-from rec_to_nwb.processing.nwb.components.dio.dio_files import DioFiles
-from rec_to_nwb.processing.nwb.components.dio.dio_injector import DioInjector
-from rec_to_nwb.processing.nwb.components.dio.dio_manager import DioManager
-from rec_to_nwb.processing.nwb.components.dio.old_dio_manager import OldDioManager
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldDioOriginator:
-
- def __init__(self, metadata, datasets):
- self.metadata = metadata
- self.datasets = datasets
-
- def make(self, nwb_content):
- logger.info('DIO: Prepare directories')
- dio_directories = [single_dataset.get_data_path_from_dataset('DIO') for single_dataset in self.datasets]
- logger.info('DIO: Prepare files')
- dio_files = DioFiles(dio_directories, self.metadata['behavioral_events'])
- logger.info('DIO: Retrieve data')
- old_dio_manager = OldDioManager(
- dio_files=dio_files.get_files(),
- dio_metadata=self.metadata['behavioral_events'],
- )
- dio_data = old_dio_manager.get_dio()
- logger.info('DIO: Building')
- dio_builder = DioBuilder(
- dio_data,
- self.metadata['behavioral_events'],
- self.metadata['units']['behavioral_events']
- )
- behavioral_events = dio_builder.build()
- logger.info('DIO: Injecting into NWB')
- dio_injector = DioInjector(nwb_content)
- dio_injector.inject(behavioral_events, 'behavior')
-
- def __get_continuous_time_files(self):
- return [single_dataset.get_continuous_time() for single_dataset in self.datasets]
\ No newline at end of file
diff --git a/rec_to_nwb/processing/builder/originators/old_mda_invalid_time_originator.py b/rec_to_nwb/processing/builder/originators/old_mda_invalid_time_originator.py
deleted file mode 100644
index 1b0ab33ab..000000000
--- a/rec_to_nwb/processing/builder/originators/old_mda_invalid_time_originator.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-import logging.config
-
-from rec_to_nwb.processing.nwb.components.mda.time.invalid.fl_mda_invalid_time_manager import FlMdaInvalidTimeManager
-from rec_to_nwb.processing.nwb.components.mda.time.invalid.mda_invalid_time_injector import MdaInvalidTimeInjector
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class MdaInvalidTimeOriginator:
-
- def __init__(self, header, metadata):
- self.fl_mda_invalid_time_manager = FlMdaInvalidTimeManager(
- sampling_rate=float(header.configuration.hardware_configuration.sampling_rate),
- metadata=metadata
- )
- self.mda_invalid_time_injector = MdaInvalidTimeInjector()
-
- def make(self, nwb_content):
- logger.info('MDA invalid times: Building')
- mda_invalid_times = self.fl_mda_invalid_time_manager.get_fl_mda_invalid_times(nwb_content)
- logger.info('MDA invalid times: Injecting')
- self.mda_invalid_time_injector.inject_all(mda_invalid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/old_mda_originator.py b/rec_to_nwb/processing/builder/originators/old_mda_originator.py
deleted file mode 100644
index e731a4d4b..000000000
--- a/rec_to_nwb/processing/builder/originators/old_mda_originator.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import logging.config
-
-from rec_to_nwb.processing.nwb.components.mda.electrical_series_creator import ElectricalSeriesCreator
-from rec_to_nwb.processing.nwb.components.mda.fl_mda_manager import FlMdaManager
-from rec_to_nwb.processing.nwb.components.mda.mda_injector import MdaInjector
-from rec_to_nwb.processing.nwb.components.mda.old_fl_mda_manager import OldFlMdaManager
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldMdaOriginator:
- def __init__(self, datasets, header, metadata):
- self.datasets = datasets
- self.header = header
- self.metadata = metadata
-
- def make(self, nwb_content):
- logger.info('MDA: Building')
- old_fl_mda_manager = OldFlMdaManager(
- nwb_content=nwb_content,
- sampling_rate=float(self.header.configuration.hardware_configuration.sampling_rate),
- datasets=self.datasets,
- conversion=self.metadata['raw_data_to_volts']
- )
- fl_mda = old_fl_mda_manager.get_data()
- logger.info('MDA: Injecting')
- MdaInjector.inject_mda(
- nwb_content=nwb_content,
- electrical_series=ElectricalSeriesCreator.create_mda(fl_mda)
- )
\ No newline at end of file
diff --git a/rec_to_nwb/processing/builder/originators/old_mda_valid_time_originator.py b/rec_to_nwb/processing/builder/originators/old_mda_valid_time_originator.py
deleted file mode 100644
index 53a17c89a..000000000
--- a/rec_to_nwb/processing/builder/originators/old_mda_valid_time_originator.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import os
-import logging.config
-
-from rec_to_nwb.processing.nwb.components.mda.time.valid.fl_mda_valid_time_manager import FlMdaValidTimeManager
-from rec_to_nwb.processing.nwb.components.mda.time.valid.mda_valid_time_injector import MdaValidTimeInjector
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class MdaValidTimeOriginator:
-
- def __init__(self, header, metadata):
- self.fl_mda_valid_time_manager = FlMdaValidTimeManager(
- sampling_rate=float(header.configuration.hardware_configuration.sampling_rate),
- metadata=metadata
- )
- self.mda_valid_time_injector = MdaValidTimeInjector()
-
- def make(self, nwb_content):
- logger.info('MDA valid times: Building')
- mda_valid_times = self.fl_mda_valid_time_manager.get_fl_mda_valid_times(nwb_content)
- logger.info('MDA valid times: Injecting')
- self.mda_valid_time_injector.inject_all(mda_valid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/old_pos_invalid_originator.py b/rec_to_nwb/processing/builder/originators/old_pos_invalid_originator.py
deleted file mode 100644
index 720c93162..000000000
--- a/rec_to_nwb/processing/builder/originators/old_pos_invalid_originator.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-import logging.config
-
-from rec_to_nwb.processing.nwb.components.position.time.invalid.fl_pos_invalid_time_manager import \
- FlPosInvalidTimeManager
-from rec_to_nwb.processing.nwb.components.position.time.invalid.pos_invalid_time_injector import PosInvalidTimeInjector
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class PosInvalidTimeOriginator:
-
- def __init__(self, metadata):
- self.fl_pos_invalid_time_manager = FlPosInvalidTimeManager(metadata)
- self.pos_invalid_time_injector = PosInvalidTimeInjector()
-
- def make(self, nwb_content):
- logger.info('POS invalid times: Building')
- pos_invalid_times = self.fl_pos_invalid_time_manager.get_fl_pos_invalid_times(nwb_content)
- logger.info('POS invalid times: Injecting')
- self.pos_invalid_time_injector.inject_all(pos_invalid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/old_pos_valid_time_originator.py b/rec_to_nwb/processing/builder/originators/old_pos_valid_time_originator.py
deleted file mode 100644
index c603a8014..000000000
--- a/rec_to_nwb/processing/builder/originators/old_pos_valid_time_originator.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-import logging.config
-
-from rec_to_nwb.processing.nwb.components.position.time.valid.fl_pos_valid_time_manager import FlPosValidTimeManager
-from rec_to_nwb.processing.nwb.components.position.time.valid.pos_valid_time_injector import PosValidTimeInjector
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class PosValidTimeOriginator:
-
- def __init__(self, metadata):
- self.fl_pos_valid_time_manager = FlPosValidTimeManager(metadata)
- self.pos_valid_time_injector = PosValidTimeInjector()
-
- def make(self, nwb_content):
- logger.info('POS valid times: Building')
- pos_valid_times = self.fl_pos_valid_time_manager.get_fl_pos_valid_times(nwb_content)
- logger.info('POS valid times: Injecting')
- self.pos_valid_time_injector.inject_all(pos_valid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/old_position_originator.py b/rec_to_nwb/processing/builder/originators/old_position_originator.py
deleted file mode 100644
index f9eed15a6..000000000
--- a/rec_to_nwb/processing/builder/originators/old_position_originator.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging.config
-import os
-
-from pynwb import NWBFile
-
-from rec_to_nwb.processing.nwb.components.position.old_fl_position_manager import OldFlPositionManager
-from rec_to_nwb.processing.nwb.components.position.position_creator import PositionCreator
-from rec_to_nwb.processing.nwb.components.processing_module.processing_module_creator import ProcessingModuleCreator
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldPositionOriginator:
-
- @beartype
- def __init__(self, datasets: list, metadata: dict, dataset_names: list, process_timestamps: bool):
- self.pm_creator = ProcessingModuleCreator('behavior', 'Contains all behavior-related data')
- self.old_fl_position_manager = OldFlPositionManager(datasets, metadata, dataset_names, process_timestamps)
- self.position_creator = PositionCreator()
-
- @beartype
- def make(self, nwb_content: NWBFile):
- logger.info('Position: Building')
- fl_positions = self.old_fl_position_manager.get_fl_positions()
- logger.info('Position: Creating')
- position = self.position_creator.create_all(fl_positions)
- logger.info('Position: Injecting into ProcessingModule')
- nwb_content.processing['behavior'].add(position)
diff --git a/rec_to_nwb/processing/builder/originators/old_video_files_originator.py b/rec_to_nwb/processing/builder/originators/old_video_files_originator.py
deleted file mode 100644
index 0d9cefbd0..000000000
--- a/rec_to_nwb/processing/builder/originators/old_video_files_originator.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_manager import FlVideoFilesManager
-from rec_to_nwb.processing.nwb.components.video_files.old_fl_video_files_manager import OldFlVideoFilesManager
-from rec_to_nwb.processing.nwb.components.video_files.video_files_creator import VideoFilesCreator
-from rec_to_nwb.processing.nwb.components.video_files.video_files_injector import VideoFilesInjector
-
-
-class OldVideoFilesOriginator:
-
- def __init__(self, raw_data_path, video_path, video_files_metadata):
- self.video_directory = video_path
- self.old_fl_video_files_manager = OldFlVideoFilesManager(raw_data_path, video_path, video_files_metadata)
-
- def make(self, nwb_content):
- fl_video_files = self.fl_video_files_manager.get_video_files()
- image_series_list = [
- VideoFilesCreator.create(fl_video_file, self.video_directory, nwb_content)
- for fl_video_file in fl_video_files
- ]
- VideoFilesInjector.inject_all(nwb_content, image_series_list)
-
diff --git a/rec_to_nwb/processing/builder/originators/pos_invalid_originator.py b/rec_to_nwb/processing/builder/originators/pos_invalid_originator.py
index 720c93162..b4e25f52d 100644
--- a/rec_to_nwb/processing/builder/originators/pos_invalid_originator.py
+++ b/rec_to_nwb/processing/builder/originators/pos_invalid_originator.py
@@ -1,12 +1,16 @@
-import os
import logging.config
+import os
from rec_to_nwb.processing.nwb.components.position.time.invalid.fl_pos_invalid_time_manager import \
FlPosInvalidTimeManager
-from rec_to_nwb.processing.nwb.components.position.time.invalid.pos_invalid_time_injector import PosInvalidTimeInjector
+from rec_to_nwb.processing.nwb.components.position.time.invalid.pos_invalid_time_injector import \
+ PosInvalidTimeInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -18,6 +22,8 @@ def __init__(self, metadata):
def make(self, nwb_content):
logger.info('POS invalid times: Building')
- pos_invalid_times = self.fl_pos_invalid_time_manager.get_fl_pos_invalid_times(nwb_content)
+ pos_invalid_times = self.fl_pos_invalid_time_manager.get_fl_pos_invalid_times(
+ nwb_content)
logger.info('POS invalid times: Injecting')
- self.pos_invalid_time_injector.inject_all(pos_invalid_times, nwb_content)
+ self.pos_invalid_time_injector.inject_all(
+ pos_invalid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/pos_valid_time_originator.py b/rec_to_nwb/processing/builder/originators/pos_valid_time_originator.py
index c603a8014..54ffffa2b 100644
--- a/rec_to_nwb/processing/builder/originators/pos_valid_time_originator.py
+++ b/rec_to_nwb/processing/builder/originators/pos_valid_time_originator.py
@@ -1,11 +1,16 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.position.time.valid.fl_pos_valid_time_manager import FlPosValidTimeManager
-from rec_to_nwb.processing.nwb.components.position.time.valid.pos_valid_time_injector import PosValidTimeInjector
+from rec_to_nwb.processing.nwb.components.position.time.valid.fl_pos_valid_time_manager import \
+ FlPosValidTimeManager
+from rec_to_nwb.processing.nwb.components.position.time.valid.pos_valid_time_injector import \
+ PosValidTimeInjector
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -17,6 +22,7 @@ def __init__(self, metadata):
def make(self, nwb_content):
logger.info('POS valid times: Building')
- pos_valid_times = self.fl_pos_valid_time_manager.get_fl_pos_valid_times(nwb_content)
+ pos_valid_times = self.fl_pos_valid_time_manager.get_fl_pos_valid_times(
+ nwb_content)
logger.info('POS valid times: Injecting')
self.pos_valid_time_injector.inject_all(pos_valid_times, nwb_content)
diff --git a/rec_to_nwb/processing/builder/originators/position_originator.py b/rec_to_nwb/processing/builder/originators/position_originator.py
index c91807842..ce0c4f10d 100644
--- a/rec_to_nwb/processing/builder/originators/position_originator.py
+++ b/rec_to_nwb/processing/builder/originators/position_originator.py
@@ -1,31 +1,627 @@
+import glob
import logging.config
import os
-from pynwb import NWBFile
-
-from rec_to_nwb.processing.nwb.components.position.fl_position_manager import FlPositionManager
-from rec_to_nwb.processing.nwb.components.position.position_creator import PositionCreator
-from rec_to_nwb.processing.nwb.components.processing_module.processing_module_creator import ProcessingModuleCreator
+import numpy as np
+import pandas as pd
+from pynwb import NWBFile, ProcessingModule, TimeSeries
+from pynwb.behavior import Position
+from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
+from rec_to_nwb.processing.exceptions.invalid_metadata_exception import (
+ InvalidMetadataException,
+)
from rec_to_nwb.processing.tools.beartype.beartype import beartype
+from scipy.ndimage import label
+from scipy.stats import linregress
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, os.pardir, "logging.conf"),
+ disable_existing_loggers=False,
+)
logger = logging.getLogger(__name__)
+NANOSECONDS_PER_SECOND = 1e9
-class PositionOriginator:
- @beartype
- def __init__(self, datasets: list, metadata: dict, dataset_names: list, process_timestamps: bool):
- self.pm_creator = ProcessingModuleCreator('behavior', 'Contains all behavior-related data')
- self.fl_position_manager = FlPositionManager(datasets, metadata, dataset_names, process_timestamps)
- self.position_creator = PositionCreator()
+class PositionOriginator:
+ def __init__(
+ self, datasets: list, metadata: dict, dataset_names: list, ptp_enabled: bool
+ ):
+ self.datasets = datasets
+ self.dataset_names = dataset_names
+ self.metadata = metadata
+ self.pm_creator = ProcessingModule(
+ "behavior", "Contains all behavior-related data"
+ )
+ self.ptp_enabled = ptp_enabled
- @beartype
def make(self, nwb_content: NWBFile):
- logger.info('Position: Building')
- fl_positions = self.fl_position_manager.get_fl_positions()
- logger.info('Position: Creating')
- position = self.position_creator.create_all(fl_positions)
- logger.info('Position: Injecting into ProcessingModule')
- nwb_content.processing['behavior'].add(position)
+ position = Position(name="position")
+
+ cameras_ids = get_cameras_ids(self.dataset_names, self.metadata)
+ meters_per_pixels = get_meters_per_pixels(cameras_ids, self.metadata)
+
+ first_timestamps = []
+
+ for dataset_ind, dataset in enumerate(self.datasets):
+ pos_path = dataset.data["pos"]
+ conversion = meters_per_pixels[dataset_ind]
+ try:
+ position_tracking_path = glob.glob(
+ os.path.join(pos_path, "*.pos_online.dat")
+ )
+ position_df = self.get_position_with_corrected_timestamps(
+ position_tracking_path[0], self.ptp_enabled
+ )
+ # Multi-position split.
+ # TODO: generalize key names?
+ key_lists = [
+ [
+ "xloc",
+ "yloc",
+ ], # led 0
+ [
+ "xloc2",
+ "yloc2",
+ ],
+ ] # led 1
+ for led_number, valid_keys in enumerate(key_lists):
+ key_set = [
+ key for key in position_df.columns.tolist() if key in valid_keys
+ ]
+ if len(key_set) > 0:
+ position.create_spatial_series(
+ name=f"led_{led_number}_series_{dataset_ind}",
+ description=", ".join(["xloc", "yloc"]),
+ data=np.asarray(position_df[key_set]),
+ conversion=conversion,
+ reference_frame="Upper left corner of video frame",
+ timestamps=np.asarray(position_df.index),
+ )
+ first_timestamps.append(position_df.index[0])
+ # add the video frame index as a new processing module
+ if "position_frame_index" not in nwb_content.processing:
+ nwb_content.create_processing_module(
+ name="position_frame_index",
+ description="stores video frame index for each position timestep",
+ )
+            # add a timeseries for each frame index set (once per series because LEDs share timestamps)
+ nwb_content.processing["position_frame_index"].add(
+ TimeSeries(
+ name=f"series_{dataset_ind}",
+ data=np.asarray(position_df["video_frame_ind"]),
+ unit="N/A",
+ timestamps=np.asarray(position_df.index),
+ )
+ )
+ # add the video non-repeat timestamp labels as a new processing module
+ if "non_repeat_timestamp_labels" not in nwb_content.processing:
+ nwb_content.create_processing_module(
+ name="non_repeat_timestamp_labels",
+ description="stores non_repeat_labels for each position timestep",
+ )
+            # add a timeseries for each non-repeat timestamp label set (once per series because LEDs share timestamps)
+ nwb_content.processing["non_repeat_timestamp_labels"].add(
+ TimeSeries(
+ name=f"series_{dataset_ind}",
+ data=np.asarray(position_df["non_repeat_timestamp_labels"]),
+ unit="N/A",
+ timestamps=np.asarray(position_df.index),
+ )
+ )
+
+ except IndexError:
+ video_file_path = glob.glob(
+ os.path.join(pos_path, "*.pos_cameraHWFrameCount.dat")
+ )
+ video_df = self.get_corrected_timestamps_without_position(
+ video_file_path[0], self.ptp_enabled
+ )
+ position.create_spatial_series(
+ name=f"series_{dataset_ind}",
+ description=", ".join(video_df.columns.tolist()),
+ data=np.asarray(video_df),
+ conversion=conversion,
+ reference_frame="Upper left corner of video frame",
+ timestamps=np.asarray(video_df.index),
+ )
+ first_timestamps.append(video_df.index[0])
+
+ # check if timestamps are in order
+ first_timestamps = np.asarray(first_timestamps)
+ assert np.all(first_timestamps[:-1] < first_timestamps[1:])
+
+ logger.info("Position: Injecting into Processing Module")
+ nwb_content.processing["behavior"].add(position)
+
+ @staticmethod
+ def get_position_with_corrected_timestamps(position_tracking_path, ptp_enabled):
+ logger.info(os.path.split(position_tracking_path)[-1])
+
+ # Get position tracking information
+ position_tracking = pd.DataFrame(
+ readTrodesExtractedDataFile(position_tracking_path)["data"]
+ ).set_index("time")
+ is_repeat_timestamp = detect_repeat_timestamps(position_tracking.index)
+ position_tracking = position_tracking.iloc[~is_repeat_timestamp]
+
+ # Get video information
+ video_info = get_video_info(position_tracking_path)
+        # On AVT cameras, the HWframeCount counter wraps to 0 above this value.
+ AVT_camHWframeCount_wrapval = 65535
+ video_info["HWframeCount"] = np.unwrap(
+ video_info["HWframeCount"].astype(np.int32),
+ period=AVT_camHWframeCount_wrapval,
+ )
+
+ # Keep track of video frames
+ video_info["video_frame_ind"] = np.arange(len(video_info))
+
+ # Disconnects manifest as repeats in the trodes time index
+ (
+ non_repeat_timestamp_labels,
+ non_repeat_timestamp_labels_id,
+ ) = detect_trodes_time_repeats_or_frame_jumps(
+ video_info.index, video_info.HWframeCount
+ )
+ logging.info(f"non_repeat_timestamp_labels = {non_repeat_timestamp_labels_id}")
+ video_info["non_repeat_timestamp_labels"] = non_repeat_timestamp_labels
+ video_info = video_info.loc[video_info.non_repeat_timestamp_labels > 0]
+
+ # Get the timestamps from the neural data
+ mcu_neural_timestamps = get_mcu_neural_timestamps(position_tracking_path)
+
+ # Get the camera time from the DIOs
+ dio_camera_ticks = find_camera_dio_channel(position_tracking_path, video_info)
+ is_valid_tick = np.isin(dio_camera_ticks, mcu_neural_timestamps.index)
+ dio_systime = np.asarray(
+ mcu_neural_timestamps.loc[dio_camera_ticks[is_valid_tick]]
+ )
+
+ if len(dio_systime) > 0:
+ # The DIOs and camera frames are initially unaligned. There is a
+ # half second pause at the start to allow for alignment.
+ pause_mid_time = find_acquisition_timing_pause(dio_systime)
+
+ # Estimate the frame rate from the DIO camera ticks as a sanity check.
+ frame_rate_from_dio = get_framerate(
+ dio_systime[dio_systime > pause_mid_time]
+ )
+ logger.info(
+ "Camera frame rate estimated from DIO camera ticks:"
+ f" {frame_rate_from_dio:0.1f} frames/s"
+ )
+ else:
+ pause_mid_time = None
+
+ # Match the camera frames to the position tracking
+ # Number of video frames can be different from online tracking because
+ # online tracking can be started or stopped before video is stopped.
+ # Additionally, for offline tracking, frames can be skipped if the
+ # frame is labeled as bad.
+ video_position_info = pd.merge(
+ video_info, position_tracking, right_index=True, left_index=True, how="left"
+ )
+
+ if ptp_enabled:
+ logger.info("PTP detected")
+ ptp_systime = np.asarray(video_position_info.HWTimestamp)
+
+ if pause_mid_time is None:
+ # estimate pause_mid_time from ptp timestamps if dio was missing
+ pause_mid_time = find_acquisition_timing_pause(ptp_systime)
+
+ frame_rate_from_ptp = get_framerate(
+ ptp_systime[ptp_systime > pause_mid_time]
+ )
+ logger.info(
+ "Camera frame rate estimated from PTP:"
+ f" {frame_rate_from_ptp:0.1f} frames/s"
+ )
+ # Convert from integer nanoseconds to float seconds
+ ptp_timestamps = pd.Index(ptp_systime / NANOSECONDS_PER_SECOND, name="time")
+ position_tracking = video_position_info.drop(
+ columns=["HWframeCount", "HWTimestamp"]
+ ).set_index(ptp_timestamps)
+
+ # Ignore positions before the timing pause.
+ is_post_pause = (
+ position_tracking.index > pause_mid_time / NANOSECONDS_PER_SECOND
+ )
+ position_tracking = position_tracking.iloc[is_post_pause]
+
+ return position_tracking
+ else:
+ logger.info("PTP not detected")
+ frame_count = np.asarray(video_position_info.HWframeCount)
+
+ camera_systime, is_valid_camera_time = estimate_camera_time_from_mcu_time(
+ video_position_info, mcu_neural_timestamps
+ )
+
+ (
+ dio_systime,
+ frame_count,
+ is_valid_camera_time,
+ camera_systime,
+ ) = remove_acquisition_timing_pause_non_ptp(
+ dio_systime,
+ frame_count,
+ camera_systime,
+ is_valid_camera_time,
+ pause_mid_time,
+ )
+ video_position_info = video_position_info.iloc[is_valid_camera_time]
+
+ frame_rate_from_camera_systime = get_framerate(camera_systime)
+ logger.info(
+ "Camera frame rate estimated from MCU timestamps:"
+ f" {frame_rate_from_camera_systime:0.1f} frames/s"
+ )
+
+ camera_to_mcu_lag = estimate_camera_to_mcu_lag(
+ camera_systime, dio_systime, len(non_repeat_timestamp_labels_id)
+ )
+
+ corrected_camera_systime = []
+            for label_id in non_repeat_timestamp_labels_id:
+                is_chunk = video_position_info.non_repeat_timestamp_labels == label_id
+ corrected_camera_systime.append(
+ correct_timestamps_for_camera_to_mcu_lag(
+ frame_count[is_chunk],
+ camera_systime[is_chunk],
+ camera_to_mcu_lag,
+ )
+ )
+ corrected_camera_systime = np.concatenate(corrected_camera_systime)
+
+ return video_position_info.set_index(
+ pd.Index(corrected_camera_systime, name="time")
+ )
+
+ @staticmethod
+ def get_corrected_timestamps_without_position(hw_frame_count_path, ptp_enabled):
+ video_info = readTrodesExtractedDataFile(hw_frame_count_path)
+ video_info = pd.DataFrame(video_info["data"]).set_index("PosTimestamp")
+        # On AVT cameras, the HWframeCount counter wraps to 0 above this value.
+ AVT_camHWframeCount_wrapval = np.iinfo(np.uint16).max
+ video_info["HWframeCount"] = np.unwrap(
+ video_info["HWframeCount"].astype(np.int32),
+ period=AVT_camHWframeCount_wrapval,
+ )
+
+ # Keep track of video frames
+ video_info["video_frame_ind"] = np.arange(len(video_info))
+
+ # Get the timestamps from the neural data
+ mcu_neural_timestamps = get_mcu_neural_timestamps(hw_frame_count_path)
+
+ # Get the camera time from the DIOs
+ try:
+ dio_camera_ticks = find_camera_dio_channel(hw_frame_count_path, video_info)
+ is_valid_tick = np.isin(dio_camera_ticks, mcu_neural_timestamps.index)
+ dio_systime = np.asarray(
+ mcu_neural_timestamps.loc[dio_camera_ticks[is_valid_tick]]
+ )
+
+ # The DIOs and camera frames are initially unaligned. There is a
+ # half second pause at the start to allow for alignment.
+ pause_mid_time = find_acquisition_timing_pause(dio_systime)
+ except IndexError:
+ logger.warning("No DIO camera ticks found...")
+ pause_mid_time = -1.0
+
+ if not ptp_enabled:
+ raise ValueError(
+ "No DIO camera ticks found and PTP not enabled. Cannot infer position timestamps."
+ )
+
+ if ptp_enabled:
+ ptp_timestamps = pd.Index(
+ video_info.HWTimestamp / NANOSECONDS_PER_SECOND, name="time"
+ )
+ video_info = video_info.set_index(ptp_timestamps)
+
+ # Ignore positions before the timing pause.
+ is_post_pause = video_info.index > pause_mid_time / NANOSECONDS_PER_SECOND
+ video_info = video_info.iloc[is_post_pause].drop(
+ columns=["HWframeCount", "HWTimestamp"]
+ )
+ return video_info
+ else:
+ raise NotImplementedError
+
+
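
Both timestamp paths above unwrap `HWframeCount` with numpy's `period` argument (available in NumPy >= 1.21), which turns a wrapping counter into a monotone one. A toy sketch with a wrap value of 8 in place of 65535:

```python
import numpy as np

# A counter that wraps at 8 reads 6, 7, 0, 1; unwrap restores 6, 7, 8, 9.
wrapped = np.array([6, 7, 0, 1], dtype=np.int32)
unwrapped = np.unwrap(wrapped, period=8)
assert np.array_equal(unwrapped, [6, 7, 8, 9])
```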
+def find_camera_dio_channel(position_tracking_path, video_info):
+ """Find the camera DIO by looping through all the DIOs
+ and finding the right number of DIO pulses.
+
+ Parameters
+ ----------
+ position_tracking_path : str
+    video_info : pd.DataFrame, shape (n_camera_frames, 5)
+
+ Returns
+ -------
+ camera_dio_times : pd.Series, shape (n_dio_pulse_state_changes,)
+ Trodes time of dio ticks
+
+ """
+ head, tail = os.path.split(position_tracking_path)
+ dio_paths = glob.glob(
+ os.path.join(
+ os.path.split(head)[0],
+ tail.split(".")[0] + ".DIO",
+ tail.split(".")[0] + "*.dat",
+ )
+ )
+
+ n_ticks = np.asarray(
+ [
+ pd.DataFrame(readTrodesExtractedDataFile(dio_file)["data"]).state.sum()
+ for dio_file in dio_paths
+ ]
+ )
+
+ n_camera_frames = video_info.shape[0]
+ position_ticks_file_ind = np.argmin(np.abs(n_ticks - n_camera_frames))
+
+ if (n_ticks[position_ticks_file_ind] < 0.5 * n_camera_frames) or (
+ n_ticks[position_ticks_file_ind] > 1.5 * n_camera_frames
+ ):
+ logger.warning(
+ "Likely could not find camera tick DIO channel."
+ f"In the most likely dio file {dio_paths[position_ticks_file_ind]},"
+ f"there are {n_ticks[position_ticks_file_ind]} ticks"
+ f" and the position file has {n_camera_frames} camera frames."
+ )
+
+ camera_ticks_dio = pd.DataFrame(
+ readTrodesExtractedDataFile(dio_paths[position_ticks_file_ind])["data"]
+ )
+
+ return camera_ticks_dio.loc[camera_ticks_dio.state == 1].time
+
+
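
The selection rule above picks the DIO file whose number of up-ticks is closest to the camera frame count, and warns when even the best match is off by more than 50%. A toy illustration of that rule with made-up counts:

```python
import numpy as np

n_ticks = np.array([12, 29_950, 4])  # made-up up-tick counts, one per DIO file
n_camera_frames = 30_000
best = int(np.argmin(np.abs(n_ticks - n_camera_frames)))
assert best == 1  # 29,950 ticks is closest to 30,000 frames

# Mirror of the warning condition in the code above:
suspicious = (n_ticks[best] < 0.5 * n_camera_frames) or (
    n_ticks[best] > 1.5 * n_camera_frames
)
assert not suspicious
```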
+def get_video_info(position_tracking_path):
+ """Get video PTP timestamps if they exist.
+
+ Parameters
+ ----------
+ position_tracking_path : str
+
+ Returns
+ -------
+ video_info : pd.DataFrame, shape (n_camera_frames, 2)
+        PosTimestamp: unadjusted position timestamps. UINT32
+ HWframeCount: integer count of frames acquired by camera
+ (rolls over at 65535; can be used to detect dropped frames). UINT32
+ HWTimestamp: POSIX time in nanoseconds, synchronized to PC sysclock via PTP. UINT64.
+
+ """
+ video_info = readTrodesExtractedDataFile(
+ position_tracking_path.replace(".pos_online.dat", ".pos_cameraHWFrameCount.dat")
+ )
+ return pd.DataFrame(video_info["data"]).set_index("PosTimestamp")
+
+
+def get_mcu_neural_timestamps(position_tracking_path):
+ """Neural timestamps.
+
+ Parameters
+ ----------
+ position_tracking_path : str
+
+ Returns
+ -------
+    mcu_neural_timestamps : pd.DataFrame
+ trodestime uint32
+ adjusted_systime int64
+
+ """
+ head, tail = os.path.split(position_tracking_path)
+ mcu_neural_timestamps_path = os.path.join(
+ os.path.split(head)[0],
+ tail.split(".")[0] + ".time",
+ tail.split(".")[0] + ".continuoustime.dat",
+ )
+ cont_time = readTrodesExtractedDataFile(mcu_neural_timestamps_path)
+
+ return pd.DataFrame(cont_time["data"]).set_index("trodestime").adjusted_systime
+
+
+def get_framerate(timestamps):
+ """Frames per second"""
+ timestamps = np.asarray(timestamps)
+ return NANOSECONDS_PER_SECOND / np.median(np.diff(timestamps))
+
+
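
`get_framerate` expects integer-nanosecond timestamps, so a 30 frames/s camera yields about 33.33 ms between frames. A quick self-contained check:

```python
import numpy as np

timestamps = np.arange(10) * int(1e9 / 30)  # ten frames at 30 Hz, in nanoseconds
frame_rate = 1e9 / np.median(np.diff(timestamps))
assert round(frame_rate, 1) == 30.0
```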
+def find_acquisition_timing_pause(
+ timestamps, min_duration=0.4, max_duration=1.0, n_search=100
+):
+ """Landmark timing 'gap' (0.5 s pause in video stream) parameters
+
+ Parameters
+ ----------
+    timestamps : ndarray of int64, in nanoseconds
+    min_duration : minimum duration of the gap (in seconds)
+    max_duration : maximum duration of the gap (in seconds)
+ n_search : search only the first `n_search` entries
+
+ Returns
+ -------
+ pause_mid_time
+        Midpoint time of the timing pause, in nanoseconds
+
+ """
+ timestamps = np.asarray(timestamps)
+ timestamp_difference = np.diff(timestamps[:n_search] / NANOSECONDS_PER_SECOND)
+ is_valid_gap = (timestamp_difference > min_duration) & (
+ timestamp_difference < max_duration
+ )
+ pause_start_ind = np.nonzero(is_valid_gap)[0][0]
+ pause_end_ind = pause_start_ind + 1
+ pause_mid_time = (
+ timestamps[pause_start_ind]
+ + (timestamps[pause_end_ind] - timestamps[pause_start_ind]) // 2
+ )
+
+ return pause_mid_time
+
+
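
A synthetic walk-through of the gap search, assuming `find_acquisition_timing_pause` and the module-level `NANOSECONDS_PER_SECOND` above are in scope:

```python
import numpy as np

# ~30 Hz frames with a 0.5 s pause after the fifth frame (all in nanoseconds).
frame_interval = int(1e9 / 30)
before = np.arange(5) * frame_interval
after = before[-1] + int(0.5e9) + np.arange(5) * frame_interval
timestamps = np.concatenate([before, after])

# The 0.5 s gap falls inside (min_duration, max_duration) = (0.4, 1.0).
pause_mid = find_acquisition_timing_pause(timestamps)
assert pause_mid == before[-1] + int(0.5e9) // 2  # midpoint of the 0.5 s gap
```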
+def find_large_frame_jumps(frame_count, min_frame_jump=15):
+ """Want to avoid regressing over large frame count skips"""
+ frame_count = np.asarray(frame_count)
+
+ is_large_frame_jump = np.insert(np.diff(frame_count) > min_frame_jump, 0, False)
+
+ logger.info(f"big frame jumps: {np.nonzero(is_large_frame_jump)[0]}")
+
+ return is_large_frame_jump
+
+
+def detect_repeat_timestamps(timestamps):
+ return np.insert(timestamps[:-1] >= timestamps[1:], 0, False)
+
+
+def detect_trodes_time_repeats_or_frame_jumps(trodes_time, frame_count):
+ """If a trodes time index repeats, then the Trodes clock has frozen
+ due to headstage disconnects."""
+ trodes_time = np.asarray(trodes_time)
+ is_repeat_timestamp = detect_repeat_timestamps(trodes_time)
+ logger.info(f"repeat timestamps ind: {np.nonzero(is_repeat_timestamp)[0]}")
+
+ is_large_frame_jump = find_large_frame_jumps(frame_count)
+ is_repeat_timestamp = is_repeat_timestamp | is_large_frame_jump
+
+ repeat_timestamp_labels = label(is_repeat_timestamp)[0]
+ repeat_timestamp_labels_id, repeat_timestamp_label_counts = np.unique(
+ repeat_timestamp_labels, return_counts=True
+ )
+ is_repeat = (repeat_timestamp_labels_id != 0) & (repeat_timestamp_label_counts > 2)
+ repeat_timestamp_labels_id = repeat_timestamp_labels_id[is_repeat]
+ repeat_timestamp_label_counts = repeat_timestamp_label_counts[is_repeat]
+ is_repeat_timestamp[
+ ~np.isin(repeat_timestamp_labels, repeat_timestamp_labels_id)
+ ] = False
+
+ non_repeat_timestamp_labels = label(~is_repeat_timestamp)[0]
+ non_repeat_timestamp_labels_id = np.unique(non_repeat_timestamp_labels)
+ non_repeat_timestamp_labels_id = non_repeat_timestamp_labels_id[
+ non_repeat_timestamp_labels_id != 0
+ ]
+
+ return (non_repeat_timestamp_labels, non_repeat_timestamp_labels_id)
+
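+# Note: `label` is presumably scipy.ndimage.label, so contiguous clean runs
+# come back as integer-labeled segments. A hedged sketch of consuming them:
+#   labels, label_ids = detect_trodes_time_repeats_or_frame_jumps(
+#       trodes_time, frame_count)
+#   for segment_id in label_ids:
+#       segment_ind = np.nonzero(labels == segment_id)[0]  # one clean run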
+
+def detect_ptp(mcu_neural_timestamps, ptp_time):
+ """Determine if PTP was used by finding the common
+ interval between the neural and camera timestamps.
+
+ A better way to do this would be to detect it in
+ the header of the .rec file"""
+ mcu_neural_timestamps = np.asarray(mcu_neural_timestamps)
+ ptp_time = np.asarray(ptp_time)
+ common_interval_duration = min(
+ np.max(mcu_neural_timestamps), np.max(ptp_time)
+ ) - max(np.min(mcu_neural_timestamps), np.min(ptp_time))
+
+ return common_interval_duration > 0.0
+
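+# Intuition (synthetic values): PTP camera times are POSIX nanoseconds, so
+# with PTP they overlap the adjusted neural timestamps; without PTP the two
+# clocks occupy disjoint ranges and the common interval is negative.
+#   detect_ptp(np.array([5, 10]), np.array([8, 12]))   # -> True
+#   detect_ptp(np.array([5, 10]), np.array([20, 30]))  # -> False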
+
+def estimate_camera_time_from_mcu_time(video_info, mcu_neural_timestamps):
+ """
+
+ Parameters
+ ----------
+ video_info : pd.DataFrame
+ mcu_neural_timestamps : pd.DataFrame
+
+ Returns
+ -------
+ camera_systime : np.ndarray, shape (n_frames_within_neural_time,)
+ is_valid_camera_time : np.ndarray, shape (n_frames,)
+
+ """
+ is_valid_camera_time = np.isin(video_info.index, mcu_neural_timestamps.index)
+ camera_systime = np.asarray(
+ mcu_neural_timestamps.loc[video_info.index[is_valid_camera_time]]
+ )
+
+ return camera_systime, is_valid_camera_time
+
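+# Hedged usage sketch: frames are kept only where the PosTimestamp index has a
+# matching neural timestamp, so the two outputs are linked by the mask:
+#   camera_systime, is_valid = estimate_camera_time_from_mcu_time(
+#       video_info, mcu_neural_timestamps)
+#   assert camera_systime.shape[0] == is_valid.sum()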
+
+def estimate_camera_to_mcu_lag(camera_systime, dio_systime, n_breaks):
+ if n_breaks == 0:
+ dio_systime = dio_systime[: len(camera_systime)]
+ camera_to_mcu_lag = np.median(camera_systime - dio_systime)
+ else:
+ camera_to_mcu_lag = camera_systime[0] - dio_systime[0]
+
+ logger.info(
+ "estimated trodes to camera lag: "
+ f"{camera_to_mcu_lag / NANOSECONDS_PER_SECOND:0.3f} s"
+ )
+ return camera_to_mcu_lag
+
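+# Design note (reading of the branch above): with no breaks, the lag is the
+# median pairwise offset, which is robust to a few dropped frames; with
+# breaks, only the first camera/DIO pair can be trusted for element-wise
+# alignment.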
+
+def remove_acquisition_timing_pause_non_ptp(
+ dio_systime, frame_count, camera_systime, is_valid_camera_time, pause_mid_time
+):
+ dio_systime = dio_systime[dio_systime > pause_mid_time]
+ frame_count = frame_count[is_valid_camera_time][camera_systime > pause_mid_time]
+ is_valid_camera_time[is_valid_camera_time] = camera_systime > pause_mid_time
+ camera_systime = camera_systime[camera_systime > pause_mid_time]
+
+ return dio_systime, frame_count, is_valid_camera_time, camera_systime
+
+
+def correct_timestamps_for_camera_to_mcu_lag(
+ frame_count, camera_systime, camera_to_mcu_lag
+):
+ regression_result = linregress(frame_count, camera_systime - camera_to_mcu_lag)
+ corrected_camera_systime = (
+ regression_result.intercept + frame_count * regression_result.slope
+ )
+ corrected_camera_systime /= NANOSECONDS_PER_SECOND
+
+ return corrected_camera_systime
+
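+# Sketch with synthetic numbers: subtracting a constant lag and regressing on
+# frame count recovers a clean per-frame clock, returned in seconds.
+#   frame_count = np.arange(100)
+#   camera_systime = frame_count * (NANOSECONDS_PER_SECOND // 30) + 10_000
+#   correct_timestamps_for_camera_to_mcu_lag(frame_count, camera_systime,
+#                                            10_000)  # ~frame_count / 30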
+
+def get_cameras_ids(dataset_names, metadata):
+ camera_ids = []
+ for dataset_name in dataset_names:
+ # extract the first element of the dataset_name as the epoch number
+ dataset_elements = str(dataset_name).split("_")
+ epoch_num = str(int(dataset_elements[0]))
+ try:
+ camera_ids.append(
+ next(
+ task["camera_id"]
+ for task in metadata["tasks"]
+ if epoch_num in task["task_epochs"]
+ )[0]
+ )
+ except Exception as e:
+ print(e)
+ raise InvalidMetadataException("Invalid camera metadata for datasets")
+ return camera_ids
+
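+# Hedged example of the metadata shape assumed here (values are strings,
+# matching the loader's int/float -> str normalization; names illustrative):
+#   metadata = {'tasks': [{'camera_id': ['1'], 'task_epochs': ['2']}]}
+#   get_cameras_ids(['02_r1'], metadata)  # -> ['1']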
+
+def get_meters_per_pixels(cameras_ids, metadata):
+ meters_per_pixels = []
+ for camera_id in cameras_ids:
+ try:
+ meters_per_pixels.append(
+ next(
+ float(camera["meters_per_pixel"])
+ for camera in metadata["cameras"]
+ if camera_id == camera["id"]
+ )
+ )
+ except Exception as e:
+ print(e)
+ raise InvalidMetadataException("Invalid camera metadata")
+ return meters_per_pixels
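+
+# Companion example (illustrative, string-typed metadata as above):
+#   metadata = {'cameras': [{'id': '1', 'meters_per_pixel': '0.001'}]}
+#   get_meters_per_pixels(['1'], metadata)  # -> [0.001]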
diff --git a/rec_to_nwb/processing/builder/originators/probe_originator.py b/rec_to_nwb/processing/builder/originators/probe_originator.py
index b40559ecd..8e61e652f 100644
--- a/rec_to_nwb/processing/builder/originators/probe_originator.py
+++ b/rec_to_nwb/processing/builder/originators/probe_originator.py
@@ -1,10 +1,14 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import FlProbeManager
+from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import \
+ FlProbeManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -17,9 +21,11 @@ def __init__(self, device_factory, device_injector, probes_metadata):
def make(self, nwb_content, shanks_dict, probes_valid_map, ):
logger.info('Probes: Building')
- fl_probes = self.fl_probe_manager.get_fl_probes(shanks_dict, probes_valid_map)
+ fl_probes = self.fl_probe_manager.get_fl_probes(
+ shanks_dict, probes_valid_map)
logger.info('Probes: Creating probes')
- probes = [self.device_factory.create_probe(fl_probe) for fl_probe in fl_probes]
+ probes = [self.device_factory.create_probe(
+ fl_probe) for fl_probe in fl_probes]
logger.info('Probes: Injecting probes into NWB')
self.device_injector.inject_all_devices(nwb_content, probes)
return probes
diff --git a/rec_to_nwb/processing/builder/originators/processing_module_originator.py b/rec_to_nwb/processing/builder/originators/processing_module_originator.py
index 849f32842..38491bb8a 100644
--- a/rec_to_nwb/processing/builder/originators/processing_module_originator.py
+++ b/rec_to_nwb/processing/builder/originators/processing_module_originator.py
@@ -1,10 +1,14 @@
import logging.config
import os
-from rec_to_nwb.processing.nwb.components.processing_module.processing_module_creator import ProcessingModuleCreator
+from rec_to_nwb.processing.nwb.components.processing_module.processing_module_creator import \
+ ProcessingModuleCreator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -13,36 +17,45 @@ class ProcessingModuleOriginator:
@staticmethod
def make(nwb_content):
logger.info('ProcessingModule: Creating behavior')
- pm_creator = ProcessingModuleCreator('behavior', 'Contains all behavior-related data')
+ pm_creator = ProcessingModuleCreator(
+ 'behavior', 'Contains all behavior-related data')
logger.info('ProcessingModule: Injecting behavior')
nwb_content.add_processing_module(pm_creator.processing_module)
logger.info('ProcessingModule: Creating tasks')
- pm_creator = ProcessingModuleCreator('tasks', 'Contains all tasks information')
+ pm_creator = ProcessingModuleCreator(
+ 'tasks', 'Contains all tasks information')
logger.info('ProcessingModule: Injecting tasks')
nwb_content.add_processing_module(pm_creator.processing_module)
logger.info('ProcessingModule: Creating associated files')
- pm_creator = ProcessingModuleCreator('associated_files', 'Contains all associated files data')
+ pm_creator = ProcessingModuleCreator(
+ 'associated_files', 'Contains all associated files data')
logger.info('ProcessingModule: Injecting associated files')
nwb_content.add_processing_module(pm_creator.processing_module)
logger.info('ProcessingModule: Creating video files')
- pm_creator = ProcessingModuleCreator('video_files', 'Contains all associated video files data')
+ pm_creator = ProcessingModuleCreator(
+ 'video_files', 'Contains all associated video files data')
logger.info('ProcessingModule: Injecting video files')
nwb_content.add_processing_module(pm_creator.processing_module)
logger.info('ProcessingModule: Creating analog')
- pm_creator = ProcessingModuleCreator('analog', 'Contains all analog data')
+ pm_creator = ProcessingModuleCreator(
+ 'analog', 'Contains all analog data')
logger.info('ProcessingModule: Injecting analog')
nwb_content.add_processing_module(pm_creator.processing_module)
- logger.info('ProcessingModule: Creating sample count-timestamp corespondence')
- pm_creator = ProcessingModuleCreator('sample_count', 'corespondence between sample count and timestamps')
- logger.info('ProcessingModule: Injecting sample count-timestamp corespondence')
+ logger.info(
+ 'ProcessingModule: Creating sample count-timestamp correspondence')
+ pm_creator = ProcessingModuleCreator(
+ 'sample_count', 'correspondence between sample count and timestamps')
+ logger.info(
+ 'ProcessingModule: Injecting sample count-timestamp correspondence')
nwb_content.add_processing_module(pm_creator.processing_module)
logger.info('ProcessingModule: Creating Camera Sample Frame Counts')
- pm_creator = ProcessingModuleCreator('camera_sample_frame_counts', 'Camera Sample Frame Counts')
+ pm_creator = ProcessingModuleCreator(
+ 'camera_sample_frame_counts', 'Camera Sample Frame Counts')
logger.info('ProcessingModule: Injecting camera_sample_frame_counts')
nwb_content.add_processing_module(pm_creator.processing_module)
diff --git a/rec_to_nwb/processing/builder/originators/sample_count_timestamp_corespondence_originator.py b/rec_to_nwb/processing/builder/originators/sample_count_timestamp_corespondence_originator.py
index c30f6837e..6197e8605 100644
--- a/rec_to_nwb/processing/builder/originators/sample_count_timestamp_corespondence_originator.py
+++ b/rec_to_nwb/processing/builder/originators/sample_count_timestamp_corespondence_originator.py
@@ -1,5 +1,5 @@
-import os
import logging.config
+import os
from rec_to_nwb.processing.nwb.components.sample_count_timestamp_corespondence.sample_count_timestamp_corespondence_injector import \
SampleCountTimestampCorespondenceInjector
@@ -7,7 +7,10 @@
SampleCountTimestampCorespondenceManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -26,4 +29,4 @@ def make(self, nwb_content):
nwb_content=nwb_content,
processing_module_name="sample_count",
timeseries=timeseries
- )
\ No newline at end of file
+ )
diff --git a/rec_to_nwb/processing/builder/originators/shanks_electrodes_originator.py b/rec_to_nwb/processing/builder/originators/shanks_electrodes_originator.py
index 65175cab9..033758c29 100644
--- a/rec_to_nwb/processing/builder/originators/shanks_electrodes_originator.py
+++ b/rec_to_nwb/processing/builder/originators/shanks_electrodes_originator.py
@@ -7,14 +7,18 @@
ShanksElectrodeCreator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class ShanksElectrodeOriginator:
def __init__(self, probes, metadata):
- self.fl_shanks_electrode_manager = FlShanksElectrodeManager(probes, metadata['electrode groups'])
+ self.fl_shanks_electrode_manager = FlShanksElectrodeManager(
+ probes, metadata['electrode_groups'])
self.shanks_electrodes_creator = ShanksElectrodeCreator()
def make(self):
diff --git a/rec_to_nwb/processing/builder/originators/shanks_originator.py b/rec_to_nwb/processing/builder/originators/shanks_originator.py
index 99dbc5238..df1308ebe 100644
--- a/rec_to_nwb/processing/builder/originators/shanks_originator.py
+++ b/rec_to_nwb/processing/builder/originators/shanks_originator.py
@@ -1,25 +1,33 @@
-import os
import logging.config
+import os
-from rec_to_nwb.processing.nwb.components.device.probe.shanks.fl_shank_manager import FlShankManager
-from rec_to_nwb.processing.nwb.components.device.probe.shanks.shank_creator import ShankCreator
+from rec_to_nwb.processing.nwb.components.device.probe.shanks.fl_shank_manager import \
+ FlShankManager
+from rec_to_nwb.processing.nwb.components.device.probe.shanks.shank_creator import \
+ ShankCreator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class ShanksOriginator:
def __init__(self, probes, metadata):
- self.fl_shank_manager = FlShankManager(probes, metadata['electrode groups'])
+ self.fl_shank_manager = FlShankManager(
+ probes, metadata['electrode_groups'])
self.shank_creator = ShankCreator()
def make(self, shanks_electrodes_dict):
logger.info('Probes-Shanks: Building')
- fl_shanks_dict = self.fl_shank_manager.get_fl_shanks_dict(shanks_electrodes_dict)
+ fl_shanks_dict = self.fl_shank_manager.get_fl_shanks_dict(
+ shanks_electrodes_dict)
logger.info('Probes-Shanks: Creating')
shanks_dict = {}
for probe_type, fl_shanks in fl_shanks_dict.items():
- shanks_dict[probe_type] = [self.shank_creator.create(fl_shank) for fl_shank in fl_shanks]
- return shanks_dict
\ No newline at end of file
+ shanks_dict[probe_type] = [self.shank_creator.create(
+ fl_shank) for fl_shank in fl_shanks]
+ return shanks_dict
diff --git a/rec_to_nwb/processing/builder/originators/task_originator.py b/rec_to_nwb/processing/builder/originators/task_originator.py
index 256220c28..124738c74 100644
--- a/rec_to_nwb/processing/builder/originators/task_originator.py
+++ b/rec_to_nwb/processing/builder/originators/task_originator.py
@@ -5,7 +5,10 @@
from rec_to_nwb.processing.nwb.components.task.task_manager import TaskManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
diff --git a/rec_to_nwb/processing/builder/originators/video_files_originator.py b/rec_to_nwb/processing/builder/originators/video_files_originator.py
index 332cfe489..465db64f6 100644
--- a/rec_to_nwb/processing/builder/originators/video_files_originator.py
+++ b/rec_to_nwb/processing/builder/originators/video_files_originator.py
@@ -1,19 +1,31 @@
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_manager import FlVideoFilesManager
-from rec_to_nwb.processing.nwb.components.video_files.video_files_creator import VideoFilesCreator
-from rec_to_nwb.processing.nwb.components.video_files.video_files_injector import VideoFilesInjector
+from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_manager import \
+ FlVideoFilesManager
+from rec_to_nwb.processing.nwb.components.video_files.video_files_creator import \
+ VideoFilesCreator
+from rec_to_nwb.processing.nwb.components.video_files.video_files_injector import \
+ VideoFilesInjector
class VideoFilesOriginator:
- def __init__(self, raw_data_path, video_path, video_files_metadata):
+ def __init__(self, raw_data_path, video_path, video_files_metadata,
+ convert_timestamps=True,
+ return_timestamps=True):
self.video_directory = video_path
- self.fl_video_files_manager = FlVideoFilesManager(raw_data_path, video_path, video_files_metadata)
+ self.fl_video_files_manager = FlVideoFilesManager(
+ raw_data_path, video_path, video_files_metadata,
+ convert_timestamps=convert_timestamps,
+ return_timestamps=return_timestamps)
def make(self, nwb_content):
+ """Gets the name, timestamps and device for each video file,
+ creates an ImageSeries object that corresponds to the video, and
+ then inserts the ImageSeries object into an NWBFile under the path:
+ processing/video_files"""
fl_video_files = self.fl_video_files_manager.get_video_files()
image_series_list = [
- VideoFilesCreator.create(fl_video_file, self.video_directory, nwb_content)
+ VideoFilesCreator.create(
+ fl_video_file, self.video_directory, nwb_content)
for fl_video_file in fl_video_files
]
VideoFilesInjector.inject_all(nwb_content, image_series_list)
-
diff --git a/rec_to_nwb/processing/builder/raw_to_nwb_builder.py b/rec_to_nwb/processing/builder/raw_to_nwb_builder.py
index 673613b2c..f48b34618 100644
--- a/rec_to_nwb/processing/builder/raw_to_nwb_builder.py
+++ b/rec_to_nwb/processing/builder/raw_to_nwb_builder.py
@@ -1,33 +1,31 @@
import logging.config
import os
import shutil
+from datetime import datetime
+import pynwb
+import numpy as np
+import pytz
from rec_to_binaries import extract_trodes_rec_file
-
-from rec_to_nwb.processing.header.reconfig_header_checker import ReconfigHeaderChecker
-from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
+from rec_to_binaries.trodes_data import get_trodes_version_from_path
from rec_to_nwb.processing.builder.nwb_file_builder import NWBFileBuilder
+from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.validation.not_empty_validator import NotEmptyValidator
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
+from rec_to_nwb.processing.validation.not_empty_validator import \
+ NotEmptyValidator
+from rec_to_nwb.processing.validation.validation_registrator import \
+ ValidationRegistrator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
-_DEFAULT_LFP_EXPORT_ARGS = ('-highpass', '0', '-lowpass', '400',
- '-interp', '0', '-userefs', '0',
- '-outputrate', '1500')
-_DEFAULT_MDA_EXPORT_ARGS = ('-usespikefilters', '0',
- '-interp', '0', '-userefs', '0')
-
-_DEFAULT_ANALOG_EXPORT_ARGS = ()
-_DEFAULT_DIO_EXPORT_ARGS = ()
-_DEFAULT_SPIKE_EXPORT_ARGS = ()
-_DEFAULT_TIME_EXPORT_ARGS = ()
-
-_DEFAULT_TRODES_REC_EXPORT_ARGS = ()
+# temporary default session start time, used only for old (pre-PTP) datasets
+_DEFAULT_SESSION_START_TIME = datetime.fromtimestamp(0, pytz.utc)
class RawToNWBBuilder:
@@ -53,7 +51,7 @@ class RawToNWBBuilder:
spikes_export_args (tuple of strings): parameters to launch spikes extraction from spikegadgets
time_export_args (tuple of strings): parameters to launch time extraction from spikegadgets
trodes_rec_export_args (tuple of strings): parameters to launch analog extraction from spikegadgets
- parallel_instances (int): number of parallel processes used during processing data
+ parallel_instances (int): number of parallel processes used during data extraction
Methods:
build_nwb()
@@ -70,20 +68,21 @@ def __init__(
nwb_metadata: MetadataManager,
output_path: str = '',
video_path: str = '',
+ preprocessing_path: str = '',
extract_analog: bool = True,
extract_spikes: bool = False,
extract_lfps: bool = False,
extract_dio: bool = True,
extract_mda: bool = True,
overwrite: bool = True,
- lfp_export_args: tuple = _DEFAULT_LFP_EXPORT_ARGS,
- mda_export_args: tuple = _DEFAULT_MDA_EXPORT_ARGS,
- analog_export_args: tuple = _DEFAULT_ANALOG_EXPORT_ARGS,
- dio_export_args: tuple = _DEFAULT_DIO_EXPORT_ARGS,
- time_export_args: tuple = _DEFAULT_TIME_EXPORT_ARGS,
- spikes_export_args: tuple = _DEFAULT_SPIKE_EXPORT_ARGS,
+ lfp_export_args: tuple = None,
+ mda_export_args: tuple = None,
+ analog_export_args: tuple = None,
+ dio_export_args: tuple = None,
+ time_export_args: tuple = None,
+ spikes_export_args: tuple = None,
parallel_instances: int = 4,
- trodes_rec_export_args: tuple = _DEFAULT_TRODES_REC_EXPORT_ARGS
+ trodes_rec_export_args: tuple = None,
):
validation_registrator = ValidationRegistrator()
@@ -92,6 +91,45 @@ def __init__(
validation_registrator.register(NotEmptyValidator(dates))
validation_registrator.validate()
+ trodes_version = get_trodes_version_from_path()[0]
+
+ if lfp_export_args is None:
+ if trodes_version < 2.0:
+ lfp_export_args = ('-highpass', '0',
+ '-lowpass', '400',
+ '-interp', '0',
+ '-userefs', '0',
+ '-outputrate', '1500')
+ else:
+ lfp_export_args = ('-lfphighpass', '0',
+ '-lfplowpass', '400',
+ '-interp', '0',
+ '-uselfprefs', '0',
+ '-outputrate', '1500',
+ '-sortingmode', '1')
+ if mda_export_args is None:
+ if trodes_version < 2.0:
+ mda_export_args = ('-usespikefilters', '0',
+ '-interp', '0',
+ '-userefs', '0')
+ else:
+ mda_export_args = ('-usespikefilters', '0',
+ '-interp', '1',
+ '-userawrefs', '0',
+ '-usespikerefs', '0',
+ '-sortingmode', '1')
+
+ if analog_export_args is None:
+ analog_export_args = ()
+ if dio_export_args is None:
+ dio_export_args = ()
+ if spikes_export_args is None:
+ spikes_export_args = ()
+ if time_export_args is None:
+ time_export_args = ()
+ if trodes_rec_export_args is None:
+ trodes_rec_export_args = ()
+
self.extract_analog = extract_analog
self.extract_spikes = extract_spikes
self.extract_dio = extract_dio
@@ -110,11 +148,29 @@ def __init__(
self.metadata = nwb_metadata.metadata
self.output_path = output_path
self.video_path = video_path
+ if not preprocessing_path:
+ self.preprocessing_path = data_path
+ else:
+ self.preprocessing_path = preprocessing_path
self.probes = nwb_metadata.probes
self.nwb_metadata = nwb_metadata
self.parallel_instances = parallel_instances
self.trodes_rec_export_args = trodes_rec_export_args
+ self.is_old_dataset = self.__is_old_dataset()
+
+ def __repr__(self):
+ return ("RawToNWBBuilder(\n"
+ f" animal_name={self.animal_name},\n"
+ f" data_path={self.data_path},\n"
+ f" dates={self.dates},\n"
+ f" overwrite={self.overwrite},\n"
+ f" output_path={self.output_path},\n"
+ f" video_path={self.video_path},\n"
+ f" preprocessing_path={self.preprocessing_path},\n"
+ f" trodes_rec_export_args={self.trodes_rec_export_args},\n"
+ ")")
+
def __is_rec_config_valid(self):
""" Check if XML is valid with XSD file """
@@ -131,7 +187,23 @@ def __get_header_path(self):
xml_file_path = self.trodes_rec_export_args[counter + 1]
return xml_file_path
- def build_nwb(self, process_mda_valid_time=True, process_mda_invalid_time=True,
+ def __is_old_dataset(self):
+ # check raw directory for the single (first) date
+ all_files = os.listdir(
+ os.path.join(self.data_path, self.animal_name, 'raw',
+ self.dates[0]))
+ if any('videoTimeStamps.cameraHWSync' in file for file in all_files):
+ # has cameraHWSync files; new dataset
+ return False
+ if any('videoTimeStamps.cameraHWFrameCount' in file for file in all_files):
+ # has cameraHWFrameCount files instead; old dataset
+ logger.info('Seems to be an old dataset (no PTP)')
+ return True
+ raise FileNotFoundError(
+ 'Need either cameraHWSync or cameraHWFrameCount files.')
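+ # Note: dataset-age detection is purely filename-based. Illustrative layout:
+ # <data_path>/<animal>/raw/<date>/*videoTimeStamps.cameraHWSync -> new (PTP)
+ # <data_path>/<animal>/raw/<date>/*videoTimeStamps.cameraHWFrameCount -> old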
+
+ def build_nwb(self, run_preprocessing=True,
+ process_mda_valid_time=True, process_mda_invalid_time=True,
process_pos_valid_time=True, process_pos_invalid_time=True):
"""Builds nwb file for experiments from given dates.
@@ -145,58 +217,123 @@ def build_nwb(self, process_mda_valid_time=True, process_mda_invalid_time=True,
process_pos_invalid_time (boolean): True if the pos invalid times should be build and append to nwb.
Need the pos data inside the nwb. (default True)
"""
+ logger.info(' START '.center(40, "="))
+ # check associated files with yaml first, before the time-consuming "preprocessing".
+ #for date in self.dates:
+ # nwb_builder = self.get_nwb_builder(date)
- self.__preprocess_data()
+ if run_preprocessing:
+ self.__preprocess_data()
+
+ self.__build_nwb_file(
+ process_mda_valid_time=process_mda_valid_time,
+ process_mda_invalid_time=process_mda_invalid_time,
+ process_pos_valid_time=process_pos_valid_time,
+ process_pos_invalid_time=process_pos_invalid_time)
+
+ logger.info('Done...\n')
+
+ def __build_nwb_file(self,
+ process_mda_valid_time=False,
+ process_mda_invalid_time=False,
+ process_pos_valid_time=False,
+ process_pos_invalid_time=False):
+ logger.info('Building NWB files')
+ os.makedirs(self.output_path, exist_ok=True)
+ os.makedirs(self.video_path, exist_ok=True)
for date in self.dates:
- nwb_builder = NWBFileBuilder(
- data_path=self.data_path,
- animal_name=self.animal_name,
- date=date,
- nwb_metadata=self.nwb_metadata,
- output_file=self.output_path + self.animal_name + date + ".nwb",
- process_mda=self.extract_mda,
- process_dio=self.extract_dio,
- process_analog=self.extract_analog,
- video_path=self.video_path,
- reconfig_header=self.__is_rec_config_valid()
- )
+ logger.info('Date: {}'.format(date))
+ nwb_builder = self.get_nwb_builder(date)
content = nwb_builder.build()
nwb_builder.write(content)
- self.append_to_nwb(
- nwb_builder=nwb_builder,
- process_mda_valid_time=process_mda_valid_time,
- process_mda_invalid_time=process_mda_invalid_time,
- process_pos_valid_time=process_pos_valid_time,
- process_pos_invalid_time=process_pos_invalid_time
- )
+ if self.is_old_dataset:
+ logger.info('(old dataset: skipping append_to_nwb)')
+ continue
+ # self.append_to_nwb(
+ # nwb_builder=nwb_builder,
+ # process_mda_valid_time=process_mda_valid_time,
+ # process_mda_invalid_time=process_mda_invalid_time,
+ # process_pos_valid_time=process_pos_valid_time,
+ # process_pos_invalid_time=process_pos_invalid_time
+ # )
+
+ def basic_test(self):
+ nwb_file_name = os.path.join(
+ self.output_path, self.animal_name + self.dates[0] + '.nwb')
+
+ io = pynwb.NWBHDF5IO(nwb_file_name, 'r')
+ nwbf = io.read()
+
+ epochs = nwbf.epochs.to_dataframe()
+
+ rawdata = nwbf.get_acquisition()
+ timestamps = np.asarray(rawdata.timestamps)
+
+ # timestamps and neural data should be of the same length in time
+ assert rawdata.data.shape[0] == timestamps.shape[0], \
+ 'timestamps and neural data are of different lengths.'
+ logger.info('Timestamps and neural data are of the same length in time.\n')
+
+ # timestamps should be strictly increasing
+ assert np.all(np.diff(timestamps) > 0), \
+ 'timestamps are not strictly increasing. Epochs may be concatenated out of order'
+ logger.info('Timestamps are strictly increasing.\n')
+
+ # position time should start at/after the recording start and end at/before the recording end
+ assert epochs['start_time'][0] >= timestamps[0]
+ assert np.array(epochs['stop_time'])[-1] <= timestamps[-1]
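+
+ # Hedged usage sketch (hypothetical `builder` instance, after the first
+ # date's file has been written):
+ # builder.build_nwb()
+ # builder.basic_test() # AssertionError if timestamps are inconsistent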
+
+ def get_nwb_builder(self, date):
+ if self.is_old_dataset:
+ old_dataset_kwargs = dict(
+ is_old_dataset=True,
+ session_start_time=_DEFAULT_SESSION_START_TIME)
+ else:
+ old_dataset_kwargs = dict()
+
+ return NWBFileBuilder(
+ data_path=self.data_path,
+ animal_name=self.animal_name,
+ date=date,
+ nwb_metadata=self.nwb_metadata,
+ output_file=os.path.join(
+ self.output_path, self.animal_name + date + ".nwb"),
+ process_mda=self.extract_mda,
+ process_dio=self.extract_dio,
+ process_analog=self.extract_analog,
+ preprocessing_path=self.preprocessing_path,
+ video_path=self.video_path,
+ reconfig_header=self.__get_header_path(),
+ # reconfig_header=self.__is_rec_config_valid()
+ **old_dataset_kwargs
+ )
def __preprocess_data(self):
- """process data with rec_to_binaries library"""
-
+ """Process data with rec_to_binaries library"""
+ logger.info('Extracting binaries from rec files...')
logger.info(
- 'Extraction parameters :' + '\n'
- + 'data_path = ' + self.data_path + '\n'
- + 'animal_name = ' + self.animal_name + '\n'
- + 'parallel_instances = ' + str(self.parallel_instances) + '\n'
- + 'extract_analog = ' + str(self.extract_analog) + '\n'
- + 'extract_dio = ' + str(self.extract_dio) + '\n'
- + 'extract_time = ' + str(True) + '\n'
- + 'extract_mda = ' + str(self.extract_mda) + '\n'
- + 'extract_lfps = ' + str(self.extract_lfps) + '\n'
- + 'extract_spikes = ' + str(self.extract_spikes) + '\n'
- + 'overwrite = ' + str(self.overwrite) + '\n'
- + 'lfp_export_args = ' + str(self.lfp_export_args) + '\n'
- + 'mda_export_args = ' + str(self.mda_export_args) + '\n'
- + 'analog_export_args = ' + str(self.analog_export_args) + '\n'
- + 'time_export_args = ' + str(self.time_export_args) + '\n'
- + 'spikes_export_args = ' + str(self.spikes_export_args) + '\n'
- + 'dio_export_args = ' + str(self.dio_export_args) + '\n'
- + 'trodes_rec_export_args = ' + str(self.trodes_rec_export_args) + '\n'
+ 'Rec to binaries extraction parameters:' + '\n'
+ + '\t data_path = ' + self.data_path + '\n'
+ + '\t animal_name = ' + self.animal_name + '\n'
+ + '\t parallel_instances = ' + str(self.parallel_instances) + '\n'
+ + '\t extract_analog = ' + str(self.extract_analog) + '\n'
+ + '\t extract_dio = ' + str(self.extract_dio) + '\n'
+ + '\t extract_time = ' + str(True) + '\n'
+ + '\t extract_mda = ' + str(self.extract_mda) + '\n'
+ + '\t extract_lfps = ' + str(self.extract_lfps) + '\n'
+ + '\t extract_spikes = ' + str(self.extract_spikes) + '\n'
+ + '\t overwrite = ' + str(self.overwrite) + '\n'
+ + '\t lfp_export_args = ' + str(self.lfp_export_args) + '\n'
+ + '\t mda_export_args = ' + str(self.mda_export_args) + '\n'
+ + '\t analog_export_args = ' + str(self.analog_export_args) + '\n'
+ + '\t time_export_args = ' + str(self.time_export_args) + '\n'
+ + '\t spikes_export_args = ' + str(self.spikes_export_args) + '\n'
+ + '\t dio_export_args = ' + str(self.dio_export_args) + '\n'
+ + '\t trodes_rec_export_args = ' + str(self.trodes_rec_export_args)
)
extract_trodes_rec_file(
self.data_path,
self.animal_name,
+ out_dir=self.preprocessing_path,
+ dates=self.dates,
parallel_instances=self.parallel_instances,
extract_analog=self.extract_analog,
extract_dio=self.extract_dio,
@@ -212,11 +349,12 @@ def __preprocess_data(self):
spikes_export_args=self.spikes_export_args,
time_export_args=self.time_export_args,
)
- self.__is_rec_config_valid()
+ logger.info('Done extracting binaries from rec files...')
+ # self.__is_rec_config_valid()
@staticmethod
def append_to_nwb(nwb_builder, process_mda_valid_time, process_mda_invalid_time,
- process_pos_valid_time, process_pos_invalid_time):
+ process_pos_valid_time, process_pos_invalid_time):
"""Append to NWBFile that was build using NWBFileBuilder passed in parameter.
Args:
@@ -234,11 +372,10 @@ def append_to_nwb(nwb_builder, process_mda_valid_time, process_mda_invalid_time,
process_pos_invalid_time=process_pos_invalid_time
)
-
def cleanup(self):
"""Remove all temporary files structure from preprocessing folder"""
- preprocessing = self.data_path + '/' + self.animal_name + '/preprocessing'
+ preprocessing = os.path.join(
+ self.preprocessing_path, self.animal_name, 'preprocessing')
if os.path.exists(preprocessing):
shutil.rmtree(preprocessing)
-
diff --git a/rec_to_nwb/processing/header/header_checker/header_extractor.py b/rec_to_nwb/processing/header/header_checker/header_extractor.py
index e45c6ccad..39be1c8e9 100644
--- a/rec_to_nwb/processing/header/header_checker/header_extractor.py
+++ b/rec_to_nwb/processing/header/header_checker/header_extractor.py
@@ -1,3 +1,5 @@
+from pathlib import Path
+
from rec_to_nwb.processing.header.xml_extractor import XMLExtractor
@@ -6,10 +8,15 @@ class HeaderFilesExtractor:
def __init__(self):
self.xml_files = []
- def extract_headers_from_rec_files(self, rec_files):
+ def extract_headers_from_rec_files(self, rec_files, copy_dir=None):
for rec_file in rec_files:
+ if copy_dir is not None:
+ rec_copy = Path(copy_dir).joinpath(rec_file.name)
+ xml_file = str(rec_copy) + '_header' + '.xml'
+ else:
+ xml_file = str(rec_file) + '_header' + '.xml'
temp_xml_extractor = XMLExtractor(rec_path=rec_file,
- xml_path=str(rec_file) + '_header' + '.xml')
+ xml_path=xml_file)
temp_xml_extractor.extract_xml_from_rec_file()
- self.xml_files.append(str(rec_file) + '_header' + '.xml')
+ self.xml_files.append(xml_file)
return self.xml_files
diff --git a/rec_to_nwb/processing/header/header_checker/header_logger.py b/rec_to_nwb/processing/header/header_checker/header_logger.py
index 841a47b6b..1187c58ea 100644
--- a/rec_to_nwb/processing/header/header_checker/header_logger.py
+++ b/rec_to_nwb/processing/header/header_checker/header_logger.py
@@ -2,7 +2,10 @@
import os
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -11,8 +14,6 @@ class HeaderLogger:
@staticmethod
def log_header_differences(headers_differences, rec_files_list):
if headers_differences:
- message = 'Rec files: ' + str(rec_files_list) + ' contain inconsistent xml headers!\n'
- differences = [diff for diff in headers_differences
- if 'systemTimeAtCreation' not in str(diff) and 'timestampAtCreation'
- not in str(diff)]
- logger.warning('%s , %s', message, differences)
+ message = 'Rec files: ' + \
+ str(rec_files_list) + ' contain inconsistent xml headers!'
+ logger.warning(message)
diff --git a/rec_to_nwb/processing/header/header_checker/header_processor.py b/rec_to_nwb/processing/header/header_checker/header_processor.py
index 68460777f..d18fc7d54 100644
--- a/rec_to_nwb/processing/header/header_checker/header_processor.py
+++ b/rec_to_nwb/processing/header/header_checker/header_processor.py
@@ -6,9 +6,9 @@
class HeaderProcessor:
@staticmethod
- def process_headers(rec_files_list):
+ def process_headers(rec_files_list, copy_dir=None):
headers_extractor = HeaderFilesExtractor()
- header_files = headers_extractor.extract_headers_from_rec_files(rec_files_list)
+ header_files = headers_extractor.extract_headers_from_rec_files(rec_files_list, copy_dir=copy_dir)
header_comparator = HeaderComparator(header_files)
headers_differences = header_comparator.compare()
diff --git a/rec_to_nwb/processing/header/reconfig_header_checker.py b/rec_to_nwb/processing/header/reconfig_header_checker.py
index 73a0e2044..a9ed45a1f 100644
--- a/rec_to_nwb/processing/header/reconfig_header_checker.py
+++ b/rec_to_nwb/processing/header/reconfig_header_checker.py
@@ -1,15 +1,16 @@
import os
import xmlschema
-
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
-from rec_to_nwb.processing.validation.xml_files_validation import XmlFilesValidator
+from rec_to_nwb.processing.validation.validation_registrator import \
+ ValidationRegistrator
+from rec_to_nwb.processing.validation.xml_files_validation import \
+ XmlFilesValidator
path = os.path.dirname(os.path.abspath(__file__))
class ReconfigHeaderChecker:
-
+
@classmethod
def validate(cls, xml_header_path):
if xml_header_path:
@@ -27,6 +28,8 @@ def __validate_xml_header(cls, xml_header_path):
@classmethod
def __compare_with_xml_schema(cls, xml_header_path):
- xsd_file_path = str(path) + '/../../../rec_to_nwb/data/header_schema.xsd'
+ xsd_file_path = os.path.join(
+ str(path), os.pardir, os.pardir, os.pardir, 'rec_to_nwb', 'data',
+ 'header_schema.xsd')
xsd_schema = xmlschema.XMLSchema(xsd_file_path)
- xmlschema.validate(xml_header_path, xsd_schema)
\ No newline at end of file
+ xmlschema.validate(xml_header_path, xsd_schema)
diff --git a/rec_to_nwb/processing/header/xml_extractor.py b/rec_to_nwb/processing/header/xml_extractor.py
index ab56b6ac6..868fdcb24 100644
--- a/rec_to_nwb/processing/header/xml_extractor.py
+++ b/rec_to_nwb/processing/header/xml_extractor.py
@@ -28,7 +28,9 @@ class XMLExtractor:
xml_path = ''
xsd_path = ''
- def __init__(self, rec_path='../data/REC_sample.xml', xml_path='../data/output.xml',
+ def __init__(self,
+ rec_path='../data/REC_sample.xml',
+ xml_path='../data/output.xml',
xsd_path=None):
self.rec_path = rec_path
self.xml_path = xml_path
diff --git a/rec_to_nwb/processing/metadata/corrupted_data_manager.py b/rec_to_nwb/processing/metadata/corrupted_data_manager.py
index b5c4cce96..f6f5d3cc6 100644
--- a/rec_to_nwb/processing/metadata/corrupted_data_manager.py
+++ b/rec_to_nwb/processing/metadata/corrupted_data_manager.py
@@ -1,6 +1,5 @@
-import copy
-
-from rec_to_nwb.processing.exceptions.corrupted_data_exception import CorruptedDataException
+from rec_to_nwb.processing.exceptions.corrupted_data_exception import \
+ CorruptedDataException
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -27,14 +26,14 @@ def get_valid_map_dict(self) -> dict:
"""
electrodes_valid_map = self.__get_electrodes_valid_map(
- ntrode_metadata=self.metadata['ntrode electrode group channel map']
+ ntrode_metadata=self.metadata['ntrode_electrode_group_channel_map']
)
electrode_groups_valid_map = self.__get_electrode_groups_valid_map(
- ntrode_metadata=self.metadata['ntrode electrode group channel map'],
+ ntrode_metadata=self.metadata['ntrode_electrode_group_channel_map'],
electrodes_valid_map=electrodes_valid_map
)
probes_valid_map = self.__get_probes_valid_map(
- electrode_groups_metadata=self.metadata['electrode groups'],
+ electrode_groups_metadata=self.metadata['electrode_groups'],
electrode_groups_valid_map=electrode_groups_valid_map
)
@@ -51,30 +50,21 @@ def get_valid_map_dict(self) -> dict:
def __get_electrodes_valid_map(ntrode_metadata: list) -> list:
electrodes_valid_map = []
for ntrode in ntrode_metadata:
- bad_channels = [int(bad_channel) for bad_channel in ntrode['bad_channels']]
+ bad_channels = [int(bad_channel)
+ for bad_channel in ntrode['bad_channels']]
electrodes_valid_map.extend(
- [bool(counter not in bad_channels) for counter, _ in enumerate(ntrode['map'])]
+ [bool(int(channel) not in bad_channels)
+ for channel in ntrode['map']]
)
return electrodes_valid_map
@beartype
def __get_electrode_groups_valid_map(self, ntrode_metadata: list,
electrodes_valid_map: list) -> set:
- tmp_electrodes_valid_map = copy.deepcopy(electrodes_valid_map)
return {
- ntrode['electrode_group_id']
- for ntrode in ntrode_metadata
- if self.__is_ntrode_valid(ntrode, tmp_electrodes_valid_map)
+ ntrode['electrode_group_id'] for ntrode in ntrode_metadata
}
- @staticmethod
- def __is_ntrode_valid(ntrode, electrodes_valid_map):
- is_valid = False
- for _ in ntrode['map']:
- if electrodes_valid_map.pop(0):
- is_valid = True
- return is_valid
-
@staticmethod
@beartype
def __get_probes_valid_map(electrode_groups_metadata: list, electrode_groups_valid_map: set) -> set:
@@ -92,6 +82,5 @@ def __validate_data(probes_valid_map):
if probe_type:
corrupted_data = False
if corrupted_data:
- raise CorruptedDataException('There is no valid data to create probe')
-
-
+ raise CorruptedDataException(
+ 'There is no valid data to create probe')
diff --git a/rec_to_nwb/processing/metadata/metadata_extractor.py b/rec_to_nwb/processing/metadata/metadata_extractor.py
index c92c5ff9e..3c7e92238 100644
--- a/rec_to_nwb/processing/metadata/metadata_extractor.py
+++ b/rec_to_nwb/processing/metadata/metadata_extractor.py
@@ -1,12 +1,20 @@
-import yaml
import json
+import yaml
-class MetadataExtractor:
+class MetadataExtractor:
@staticmethod
def extract_metadata(metadata_path):
- with open(metadata_path, 'r') as stream:
- metadata_dict = yaml.safe_load(stream)
- metadata = json.loads(json.dumps(metadata_dict), parse_int=str, parse_float=str)
- return metadata
+ with open(metadata_path, "r") as stream:
+ yaml_dict = yaml.safe_load(stream)
+
+ try:
+ # yaml automatically converts date_of_birth to a datetime object, need to convert back
+ yaml_dict["subject"]["date_of_birth"] = yaml_dict["subject"][
+ "date_of_birth"
+ ].strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+ except KeyError:
+ raise ValueError("date of birth is required in metadata yml file")
+ # downstream code expects ints and floats as strings, so convert them here
+ return json.loads(json.dumps(yaml_dict), parse_int=str, parse_float=str)
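+
+ # Hedged example (illustrative yml): `date_of_birth: 2020-01-01 00:00:00` is
+ # parsed by yaml as a datetime and re-serialized above as
+ # '2020-01-01T00:00:00.000000Z' before the int/float -> str normalization.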
diff --git a/rec_to_nwb/processing/metadata/metadata_manager.py b/rec_to_nwb/processing/metadata/metadata_manager.py
index 5c2d3983c..760a75756 100644
--- a/rec_to_nwb/processing/metadata/metadata_manager.py
+++ b/rec_to_nwb/processing/metadata/metadata_manager.py
@@ -1,9 +1,12 @@
from rec_to_nwb.processing.metadata.metadata_extractor import MetadataExtractor
-from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_extractor import FlProbesExtractor
+from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_extractor import \
+ FlProbesExtractor
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.validation.metadata_validator import MetadataValidator
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
+from rec_to_nwb.processing.validation.metadata_validator import \
+ MetadataValidator
+from rec_to_nwb.processing.validation.validation_registrator import \
+ ValidationRegistrator
class MetadataManager:
@@ -12,7 +15,7 @@ class MetadataManager:
metadata_path (string): path to file .yml with metadata describing experiment
probes_paths (list of strings): list of paths to .yml files with data describing probes used in experiment
"""
-
+
@beartype
def __init__(self, metadata_path: str, probes_paths: list):
self.__validate(metadata_path, probes_paths)
@@ -29,7 +32,8 @@ def __init__(self, metadata_path: str, probes_paths: list):
@staticmethod
def __validate(metadata_path, probes_paths):
validation_registrator = ValidationRegistrator()
- validation_registrator.register(MetadataValidator(metadata_path, probes_paths))
+ validation_registrator.register(
+ MetadataValidator(metadata_path, probes_paths))
validation_registrator.validate()
def __get_metadata(self, metadata_path):
@@ -39,11 +43,18 @@ def __get_probes(self, probes_paths):
return self.fl_probes_extractor.extract_probes_metadata(probes_paths)
def __str__(self):
- metadata_info = 'Experimenter: ' + self.metadata['experimenter name'] + \
- '\nDescription: ' + self.metadata['experiment description'] + \
- '\nSession Id: ' + self.metadata['session_id'] + \
- '\nSubject: ' + self.metadata['subject']['description']
+
+ experimenter_name = self.metadata['experimenter_name']
+ if not isinstance(experimenter_name, str):
+ experimenter_name = ', '.join(experimenter_name)
+
+ metadata_info = (
+ 'Experimenter: ' + experimenter_name +
+ '\nDescription: ' + self.metadata['experiment_description'] +
+ '\nSession Id: ' + self.metadata['session_id'] +
+ '\nSubject: ' + self.metadata['subject']['description'])
probe_types = list(map(lambda probe: probe['probe_type'], self.probes))
probe_types_info = '\n\nAvailable probe types: ' + str(probe_types)
+
return 'Experiment Info:\n' + metadata_info + probe_types_info
diff --git a/rec_to_nwb/processing/nwb/common/data_manager.py b/rec_to_nwb/processing/nwb/common/data_manager.py
index 7ac5747dc..9414e83c7 100644
--- a/rec_to_nwb/processing/nwb/common/data_manager.py
+++ b/rec_to_nwb/processing/nwb/common/data_manager.py
@@ -9,8 +9,9 @@ def __init__(self, directories):
self.number_of_datasets = self.get_number_of_datasets()
self.number_of_files_per_dataset = self.get_number_of_files_per_dataset()
- self.number_of_rows_per_file = self._get_data_shape(0)[0]
- self.file_lenghts_in_datasets = self._get_file_length(self.number_of_datasets)
+ self.number_of_rows_per_file = self._get_number_of_rows_per_file()
+ self.file_lengths_in_datasets = self._get_file_length(
+ self.number_of_datasets)
@abc.abstractmethod
def read_data(self, dataset_id, file_id):
@@ -19,11 +20,14 @@ def read_data(self, dataset_id, file_id):
def get_number_of_files_per_dataset(self):
return np.shape(self.directories)[1]
- def _get_data_shape(self, dataset_num):
- dim1 = np.shape(self.read_data(dataset_num, 0))[0]
- dim2 = np.shape(self.read_data(dataset_num, 0))[1]
+ def _get_data_shape(self, dataset_num, file_num=0):
+ dim1 = np.shape(self.read_data(dataset_num, file_num))[0]
+ dim2 = np.shape(self.read_data(dataset_num, file_num))[1]
return dim1, dim2
+ def get_data_shape(self, dataset_num, file_num=0):
+ return self._get_data_shape(dataset_num, file_num)
+
def _get_file_length(self, number_of_datasets):
return [self._get_data_shape(i)[1] for i in range(number_of_datasets)]
@@ -31,13 +35,20 @@ def get_number_of_datasets(self):
return np.shape(self.directories)[0]
def get_final_data_shape(self):
- return self.number_of_rows_per_file * self.number_of_files_per_dataset, sum(self.file_lenghts_in_datasets)
+ # return self.number_of_rows_per_file * self.number_of_files_per_dataset, sum(self.file_lengths_in_datasets)
+ return (sum(self.number_of_rows_per_file), sum(self.file_lengths_in_datasets))
def get_directories(self):
return self.directories
+ def _get_number_of_rows_per_file(self):
+ dataset_num = 0 # assume that all datasets have identical structures
+ # but files may have different numbers of rows
+ return [np.shape(self.read_data(dataset_num, file_num))[0]
+ for file_num in range(self.number_of_files_per_dataset)]
+
def get_number_of_rows_per_file(self):
return self.number_of_rows_per_file
- def get_file_lenghts_in_datasets(self):
- return self.file_lenghts_in_datasets
+ def get_file_lengths_in_datasets(self):
+ return self.file_lengths_in_datasets
diff --git a/rec_to_nwb/processing/nwb/common/old_timestamps_manager.py b/rec_to_nwb/processing/nwb/common/old_timestamps_manager.py
deleted file mode 100644
index b4d8fe01c..000000000
--- a/rec_to_nwb/processing/nwb/common/old_timestamps_manager.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import abc
-import logging.config
-import os
-
-import numpy as np
-
-
-path = os.path.dirname(os.path.abspath(__file__))
-
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldTimestampManager(abc.ABC):
- def __init__(self, directories):
- self.directories = directories
- self.number_of_datasets = self._get_number_of_datasets()
- self.file_lenghts_in_datasets = self.__calculate_file_lenghts_in_datasets()
-
- @abc.abstractmethod
- def _get_timestamps(self, dataset_id):
- pass
-
- def read_timestamps_ids(self, dataset_id):
- return self._get_timestamps(dataset_id)
-
- def get_final_data_shape(self):
- return sum(self.file_lenghts_in_datasets),
-
- def get_number_of_datasets(self):
- return self.number_of_datasets
-
- def get_file_lenghts_in_datasets(self):
- return self.file_lenghts_in_datasets
-
- def __calculate_file_lenghts_in_datasets(self):
- return [self._get_data_shape(i) for i in range(self.number_of_datasets)]
-
- def _get_number_of_datasets(self):
- return np.shape(self.directories)[0]
-
- def _get_data_shape(self, dataset_num):
- return np.shape(self.read_timestamps_ids(dataset_num))[0]
\ No newline at end of file
diff --git a/rec_to_nwb/processing/nwb/common/session_time_extractor.py b/rec_to_nwb/processing/nwb/common/session_time_extractor.py
index cd2ada4db..59cbb59e5 100644
--- a/rec_to_nwb/processing/nwb/common/session_time_extractor.py
+++ b/rec_to_nwb/processing/nwb/common/session_time_extractor.py
@@ -1,3 +1,4 @@
+import os
from datetime import datetime
from dateutil.tz import tzlocal
@@ -13,12 +14,15 @@ def __init__(self, datasets, animal_name, date, dataset_names):
self.dataset_names = dataset_names
def get_session_start_time(self):
- continuous_time_file = \
- self.datasets[0].data['time'] + '/' + self.date + '_' + self.animal_name + '_' \
- + self.dataset_names[0] + '.continuoustime.dat'
- continuous_time = SessionTimeExtractor.__read_continuous_time(continuous_time_file)
+ continuous_time_file = os.path.join(
+ self.datasets[0].data['time'],
+ self.date + '_' + self.animal_name + '_' + self.dataset_names[0] +
+ '.continuoustime.dat')
+ continuous_time = SessionTimeExtractor.__read_continuous_time(
+ continuous_time_file)
session_start_timestamp = continuous_time['system_time_at_creation']
- session_start_datetime = datetime.fromtimestamp(int(session_start_timestamp)/1E3, tzlocal())
+ session_start_datetime = datetime.fromtimestamp(
+ int(session_start_timestamp) / 1E3, tzlocal())
return session_start_datetime
@staticmethod
diff --git a/rec_to_nwb/processing/nwb/common/timestamps_manager.py b/rec_to_nwb/processing/nwb/common/timestamps_manager.py
index 9946d7427..8bf7c5021 100644
--- a/rec_to_nwb/processing/nwb/common/timestamps_manager.py
+++ b/rec_to_nwb/processing/nwb/common/timestamps_manager.py
@@ -3,13 +3,16 @@
import os
import numpy as np
-
-from rec_to_nwb.processing.time.continuous_time_extractor import ContinuousTimeExtractor
+from rec_to_nwb.processing.time.continuous_time_extractor import \
+ ContinuousTimeExtractor
from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -21,35 +24,37 @@ def __init__(self, directories, continuous_time_directories):
self.timestamp_converter = TimestampConverter()
self.number_of_datasets = self._get_number_of_datasets()
- self.file_lenghts_in_datasets = self.__calculate_file_lenghts_in_datasets()
+ self.file_lengths_in_datasets = self.__calculate_file_lengths_in_datasets()
@abc.abstractmethod
def _get_timestamps(self, dataset_id):
pass
- def retrieve_real_timestamps(self, dataset_id):
+ def retrieve_real_timestamps(self, dataset_id, convert_timestamps=True):
timestamps_ids = self.read_timestamps_ids(dataset_id)
- continuous_time_dict = self.continuous_time_extractor.get_continuous_time_dict_file(
+ if not convert_timestamps:
+ return timestamps_ids
+ continuous_time = self.continuous_time_extractor.get_continuous_time_array_file(
self.continuous_time_directories[dataset_id])
- return self.timestamp_converter.convert_timestamps(continuous_time_dict, timestamps_ids)
+ return self.timestamp_converter.convert_timestamps(continuous_time, timestamps_ids)
def read_timestamps_ids(self, dataset_id):
return self._get_timestamps(dataset_id)
def get_final_data_shape(self):
- return sum(self.file_lenghts_in_datasets),
+ return sum(self.file_lengths_in_datasets),
def get_number_of_datasets(self):
return self.number_of_datasets
- def get_file_lenghts_in_datasets(self):
- return self.file_lenghts_in_datasets
+ def get_file_lengths_in_datasets(self):
+ return self.file_lengths_in_datasets
- def __calculate_file_lenghts_in_datasets(self):
+ def __calculate_file_lengths_in_datasets(self):
return [self._get_data_shape(i) for i in range(self.number_of_datasets)]
def _get_number_of_datasets(self):
return np.shape(self.directories)[0]
def _get_data_shape(self, dataset_num):
- return np.shape(self.read_timestamps_ids(dataset_num))[0]
\ No newline at end of file
+ return np.shape(self.read_timestamps_ids(dataset_num))[0]
diff --git a/rec_to_nwb/processing/nwb/components/analog/analog_files.py b/rec_to_nwb/processing/nwb/components/analog/analog_files.py
index e7502bb62..4a29afa30 100644
--- a/rec_to_nwb/processing/nwb/components/analog/analog_files.py
+++ b/rec_to_nwb/processing/nwb/components/analog/analog_files.py
@@ -1,8 +1,8 @@
+import glob
import os
class AnalogFiles:
-
def __init__(self, directories):
self.directories = directories
@@ -12,10 +12,10 @@ def get_files(self):
@classmethod
def __get_dict(cls, directory):
analog_dict = {}
- files = os.listdir(directory)
- files.sort()
- for file in files:
- if file.endswith('.dat'):
- split_filename = file.split('.')
- analog_dict[split_filename[-2].split('_')[-1]] = directory + '/' + file
+ for file in glob.glob(os.path.join(directory, "*.dat")):
+ if "timestamps" in file:
+ analog_name = "timestamps"
+ else:
+ analog_name = file.split(".")[-2]
+ # glob already returns the directory-qualified path
+ analog_dict[analog_name] = file
return analog_dict
diff --git a/rec_to_nwb/processing/nwb/components/analog/analog_injector.py b/rec_to_nwb/processing/nwb/components/analog/analog_injector.py
index 0dda3da32..c8dc9e8d9 100644
--- a/rec_to_nwb/processing/nwb/components/analog/analog_injector.py
+++ b/rec_to_nwb/processing/nwb/components/analog/analog_injector.py
@@ -6,4 +6,5 @@ def __init__(self, nwb_content):
def inject(self, behavioral_events, processing_module_name):
"""insert behavioral events to specified processing module in nwb file"""
- self.nwb_content.processing[processing_module_name].add(behavioral_events)
+ self.nwb_content.processing[processing_module_name].add(
+ behavioral_events)
diff --git a/rec_to_nwb/processing/nwb/components/analog/fl_analog_extractor.py b/rec_to_nwb/processing/nwb/components/analog/fl_analog_extractor.py
index 175441965..9d755e29e 100644
--- a/rec_to_nwb/processing/nwb/components/analog/fl_analog_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/analog/fl_analog_extractor.py
@@ -2,32 +2,44 @@
import os
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.time.continuous_time_extractor import ContinuousTimeExtractor
+from rec_to_nwb.processing.time.continuous_time_extractor import \
+ ContinuousTimeExtractor
from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class FlAnalogExtractor:
@staticmethod
- def extract_analog_for_single_dataset(analog_files, continuous_time_file):
+ def extract_analog_for_single_dataset(analog_files, continuous_time_file,
+ convert_timestamps=True):
single_dataset_data = {}
- for analog_file in analog_files:
- if not 'timestamps' in analog_file:
- analog_data = readTrodesExtractedDataFile(analog_files[analog_file])
+ for analog_sensor in analog_files:
+ analog_data = readTrodesExtractedDataFile(
+ analog_files[analog_sensor])
+ if 'timestamps' not in analog_sensor:
values = analog_data['data']
single_dataset_data[analog_data['id']] = values
else:
- continuous_time_dict = ContinuousTimeExtractor.get_continuous_time_dict_file(continuous_time_file)
- timestamp = readTrodesExtractedDataFile(analog_files[analog_file])
- keys = [key[0] for key in timestamp['data']]
- single_dataset_data[analog_file] = TimestampConverter.convert_timestamps(continuous_time_dict, keys)
+ timestamps = FlAnalogExtractor._extract_analog_timestamps(
+ analog_data, continuous_time_file, convert_timestamps)
+ single_dataset_data[analog_sensor] = timestamps
return single_dataset_data
-
-
-
+ @staticmethod
+ def _extract_analog_timestamps(analog_data, continuous_time_file, convert_timestamps):
+ timestamps = analog_data['data']['time']
+ if convert_timestamps:
+ continuous_time = ContinuousTimeExtractor.get_continuous_time_array_file(
+ continuous_time_file)
+ return TimestampConverter.convert_timestamps(continuous_time, timestamps)
+ else:
+ # for old dataset, skip real-time conversion and just report Trodes time counts,
+ # because the adjusted_systime is not ready
+ return timestamps
diff --git a/rec_to_nwb/processing/nwb/components/analog/fl_analog_manager.py b/rec_to_nwb/processing/nwb/components/analog/fl_analog_manager.py
index fc51c315b..c4072de7d 100644
--- a/rec_to_nwb/processing/nwb/components/analog/fl_analog_manager.py
+++ b/rec_to_nwb/processing/nwb/components/analog/fl_analog_manager.py
@@ -1,24 +1,36 @@
import numpy as np
-
from rec_to_nwb.processing.nwb.components.analog.fl_analog import FlAnalog
-from rec_to_nwb.processing.nwb.components.analog.fl_analog_builder import FlAnalogBuilder
-from rec_to_nwb.processing.nwb.components.analog.fl_analog_extractor import FlAnalogExtractor
+from rec_to_nwb.processing.nwb.components.analog.fl_analog_builder import (
+ FlAnalogBuilder,
+)
+from rec_to_nwb.processing.nwb.components.analog.fl_analog_extractor import (
+ FlAnalogExtractor,
+)
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_equal_length
+from rec_to_nwb.processing.tools.validate_parameters import (
+ validate_parameters_equal_length,
+)
class FlAnalogManager:
-
@beartype
- def __init__(self, analog_files: list, continuous_time_files: list):
+ def __init__(
+ self,
+ analog_files: list,
+ continuous_time_files: list,
+ convert_timestamps: bool = True,
+ return_timestamps: bool = True,
+ ):
validate_parameters_equal_length(__name__, analog_files, continuous_time_files)
self.analog_files = analog_files
self.continuous_time_files = continuous_time_files
+ self.convert_timestamps = convert_timestamps
+ self.return_timestamps = return_timestamps
@beartype
def get_analog(self) -> FlAnalog:
- """"extract data from analog files"""
+ """extract data from analog files"""
all_analog_data = []
number_of_datasets = len(self.analog_files)
@@ -26,33 +38,46 @@ def get_analog(self) -> FlAnalog:
all_analog_data.append(
FlAnalogExtractor.extract_analog_for_single_dataset(
self.analog_files[i],
- self.continuous_time_files[i]
+ self.continuous_time_files[i],
+ convert_timestamps=self.convert_timestamps,
)
)
merged_epochs = self.__merge_epochs(all_analog_data)
description = self.__merge_row_description(all_analog_data)
analog_data = self.__merge_analog_sensors(merged_epochs)
- return FlAnalogBuilder.build(analog_data, self.__get_timestamps(merged_epochs), description)
+
+ if self.return_timestamps:
+ timestamps = self.__get_timestamps(merged_epochs)
+ else:
+ timestamps = []
+ return FlAnalogBuilder.build(analog_data, timestamps, description)
@staticmethod
def __merge_epochs(data_from_multiple_datasets):
merged_epochs = data_from_multiple_datasets[0]
for single_dataset_data in data_from_multiple_datasets[1:]:
for row in single_dataset_data.keys():
- merged_epochs[row] = np.hstack((merged_epochs[row], single_dataset_data[row]))
+ merged_epochs[row] = np.hstack(
+ (merged_epochs[row], single_dataset_data[row])
+ )
return merged_epochs
@staticmethod
def __merge_row_description(data_from_multiple_datasets):
row_ids = data_from_multiple_datasets[0].keys()
- description = ''
+ description = ""
for id in row_ids:
- description += id + ' '
+ if "timestamp" not in id:
+ description += id + " "
return description
@classmethod
def __merge_analog_sensors(cls, merged_epochs):
- analog_sensors = [merged_epochs[analog_sensor] for analog_sensor in merged_epochs.keys() if 'timestamp' not in analog_sensor]
+ analog_sensors = [
+ merged_epochs[analog_sensor]
+ for analog_sensor in merged_epochs.keys()
+ if "timestamp" not in analog_sensor
+ ]
merged_analog_sensors = np.array(analog_sensors, np.int32)
transposed_analog_data = np.ndarray.transpose(merged_analog_sensors)
return transposed_analog_data
@@ -60,5 +85,5 @@ def __merge_analog_sensors(cls, merged_epochs):
@classmethod
def __get_timestamps(cls, merged_epochs):
for analog_sensor in merged_epochs.keys():
- if 'timestamps' in analog_sensor:
+ if "timestamps" in analog_sensor:
return merged_epochs[analog_sensor]
diff --git a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_builder.py b/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_builder.py
deleted file mode 100644
index 44d97cec1..000000000
--- a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_builder.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from rec_to_nwb.processing.nwb.components.analog.fl_analog import FlAnalog
-
-
-class OldFlAnalogBuilder:
-
- @staticmethod
- def build(data, timestamps, description):
- return FlAnalog(data, timestamps, description)
diff --git a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_extractor.py b/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_extractor.py
deleted file mode 100644
index 25b2d3425..000000000
--- a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_extractor.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import logging.config
-import os
-
-from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldFlAnalogExtractor:
-
- @staticmethod
- def extract_analog_for_single_dataset(analog_files):
- single_dataset_data = {}
- for analog_file in analog_files:
- analog_data = readTrodesExtractedDataFile(analog_files[analog_file])
- values = analog_data['data']
- single_dataset_data[analog_data['id']] = values
- return single_dataset_data
diff --git a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_manager.py b/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_manager.py
deleted file mode 100644
index cfe9b608d..000000000
--- a/rec_to_nwb/processing/nwb/components/analog/old_fl_analog_manager.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import numpy as np
-
-from rec_to_nwb.processing.nwb.components.analog.fl_analog import FlAnalog
-from rec_to_nwb.processing.nwb.components.analog.old_fl_analog_builder import OldFlAnalogBuilder
-from rec_to_nwb.processing.nwb.components.analog.old_fl_analog_extractor import OldFlAnalogExtractor
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_equal_length
-
-
-class OldFlAnalogManager:
-
- @beartype
- def __init__(self, analog_files: list):
- validate_parameters_equal_length(__name__, analog_files)
-
- self.analog_files = analog_files
-
- @beartype
- def get_analog(self) -> FlAnalog:
- """"extract data from analog files"""
-
- all_analog_data = []
- number_of_datasets = len(self.analog_files)
- for i in range(number_of_datasets):
- all_analog_data.append(
- OldFlAnalogExtractor.extract_analog_for_single_dataset(
- self.analog_files[i]
- )
- )
- merged_epochs = self.__merge_epochs(all_analog_data)
- description = self.__merge_row_description(all_analog_data)
- analog_data = self.__merge_analog_sensors(merged_epochs)
-
- timestamps = []
- return OldFlAnalogBuilder.build(analog_data, timestamps, description)
-
- @staticmethod
- def __merge_epochs(data_from_multiple_datasets):
- merged_epochs = data_from_multiple_datasets[0]
- for single_dataset_data in data_from_multiple_datasets[1:]:
- for row in single_dataset_data.keys():
- merged_epochs[row] = np.hstack((merged_epochs[row], single_dataset_data[row]))
- return merged_epochs
-
- @staticmethod
- def __merge_row_description(data_from_multiple_datasets):
- row_ids = data_from_multiple_datasets[0].keys()
- description = ''
- for id in row_ids:
- description += id + ' '
- return description
-
- @classmethod
- def __merge_analog_sensors(cls, merged_epochs):
- analog_sensors = [merged_epochs[analog_sensor] for analog_sensor in merged_epochs.keys() if
- 'timestamp' not in analog_sensor]
- merged_analog_sensors = np.array(analog_sensors, np.int32)
- transposed_analog_data = np.ndarray.transpose(merged_analog_sensors)
- return transposed_analog_data
diff --git a/rec_to_nwb/processing/nwb/components/associated_files/associated_files_creator.py b/rec_to_nwb/processing/nwb/components/associated_files/associated_files_creator.py
index 1eb13cbaf..834e0f4cb 100644
--- a/rec_to_nwb/processing/nwb/components/associated_files/associated_files_creator.py
+++ b/rec_to_nwb/processing/nwb/components/associated_files/associated_files_creator.py
@@ -1,4 +1,4 @@
-from ndx_franklab_novela.associated_files import AssociatedFiles
+from ndx_franklab_novela import AssociatedFiles
class AssociatedFilesCreator:
@@ -6,9 +6,8 @@ class AssociatedFilesCreator:
@classmethod
def create(cls, fl_associated_file):
return AssociatedFiles(
- name=fl_associated_file.name,
- description=fl_associated_file.description,
- content=fl_associated_file.content,
- task_epochs=fl_associated_file.task_epochs
- )
-
+ name=fl_associated_file.name,
+ description=fl_associated_file.description,
+ content=fl_associated_file.content,
+ task_epochs=fl_associated_file.task_epochs
+ )
diff --git a/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_builder.py b/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_builder.py
index 7694bcc9a..008bb205a 100644
--- a/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_builder.py
+++ b/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_builder.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_file import FlAssociatedFile
+from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_file import \
+ FlAssociatedFile
class FlAssociatedFilesBuilder:
diff --git a/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_manager.py b/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_manager.py
index 41c13dfdd..bc49ec25e 100644
--- a/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_manager.py
+++ b/rec_to_nwb/processing/nwb/components/associated_files/fl_associated_files_manager.py
@@ -1,5 +1,7 @@
-from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_builder import FlAssociatedFilesBuilder
-from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_reader import FlAssociatedFilesReader
+from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_builder import \
+ FlAssociatedFilesBuilder
+from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_files_reader import \
+ FlAssociatedFilesReader
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device.py b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device.py
index 3859cda8b..a50b8fa67 100644
--- a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device.py
+++ b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device.py
@@ -4,4 +4,4 @@ def __init__(self, name, system, amplifier, adc_circuit):
self.name = name
self.system = system
self.amplifier = amplifier
- self.adc_circuit = adc_circuit
\ No newline at end of file
+ self.adc_circuit = adc_circuit
diff --git a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_builder.py b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_builder.py
index f6c7c6e05..57d08f09e 100644
--- a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_builder.py
+++ b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_builder.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device import FlDataAcqDevice
+from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device import \
+ FlDataAcqDevice
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -12,4 +13,4 @@ def build(name, system, amplifier, adc_circuit):
system=system,
amplifier=amplifier,
adc_circuit=adc_circuit
- )
\ No newline at end of file
+ )
diff --git a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_manager.py b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_manager.py
index ebb6d8463..871762ee6 100644
--- a/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_manager.py
+++ b/rec_to_nwb/processing/nwb/components/device/acq/fl_data_acq_device_manager.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device_builder import FlDataAcqDeviceBuilder
+from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device_builder import \
+ FlDataAcqDeviceBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_builder.py b/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_builder.py
index 6e86c4bf1..406b91e32 100644
--- a/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_builder.py
+++ b/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_builder.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device import FlCameraDevice
+from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device import \
+ FlCameraDevice
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_manager.py b/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_manager.py
index 6337301e5..43194339a 100644
--- a/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_manager.py
+++ b/rec_to_nwb/processing/nwb/components/device/camera/fl_camera_device_manager.py
@@ -1,6 +1,8 @@
-from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device_builder import FlCameraDeviceBuilder
+from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device_builder import \
+ FlCameraDeviceBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class FlCameraDeviceManager:
diff --git a/rec_to_nwb/processing/nwb/components/device/device_factory.py b/rec_to_nwb/processing/nwb/components/device/device_factory.py
index 007c9f9d4..b813b2cea 100644
--- a/rec_to_nwb/processing/nwb/components/device/device_factory.py
+++ b/rec_to_nwb/processing/nwb/components/device/device_factory.py
@@ -1,16 +1,16 @@
-from ndx_franklab_novela.camera_device import CameraDevice
-from ndx_franklab_novela.data_acq_device import DataAcqDevice
-from ndx_franklab_novela.header_device import HeaderDevice
-from ndx_franklab_novela.probe import Probe
+from ndx_franklab_novela import CameraDevice, DataAcqDevice, HeaderDevice, Probe
from pynwb.device import Device
-
-from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device import FlDataAcqDevice
-from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device import FlCameraDevice
+from rec_to_nwb.processing.nwb.components.device.acq.fl_data_acq_device import \
+ FlDataAcqDevice
+from rec_to_nwb.processing.nwb.components.device.camera.fl_camera_device import \
+ FlCameraDevice
from rec_to_nwb.processing.nwb.components.device.fl_device import FlDevice
-from rec_to_nwb.processing.nwb.components.device.header.fl_header_device import FlHeaderDevice
+from rec_to_nwb.processing.nwb.components.device.header.fl_header_device import \
+ FlHeaderDevice
from rec_to_nwb.processing.nwb.components.device.probe.fl_probe import FlProbe
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class DeviceFactory:
@@ -26,7 +26,8 @@ def create_device(cls, fl_device: FlDevice) -> Device:
@classmethod
@beartype
def create_camera_device(cls, fl_camera_device: FlCameraDevice) -> CameraDevice:
- validate_parameters_not_none(__name__, fl_camera_device.name, fl_camera_device.meters_per_pixel)
+ validate_parameters_not_none(
+ __name__, fl_camera_device.name, fl_camera_device.meters_per_pixel)
return CameraDevice(
name=fl_camera_device.name,
meters_per_pixel=fl_camera_device.meters_per_pixel,
@@ -71,27 +72,36 @@ def create_data_acq_device(cls, fl_data_acq_device: FlDataAcqDevice) -> DataAcqD
@classmethod
@beartype
def create_header_device(cls, fl_header_device: FlHeaderDevice) -> HeaderDevice:
- validate_parameters_not_none(__name__, fl_header_device.name, fl_header_device.global_configuration)
+ validate_parameters_not_none(
+ __name__, fl_header_device.name, fl_header_device.global_configuration)
return HeaderDevice(
name=fl_header_device.name,
headstage_serial=fl_header_device.global_configuration['headstage_serial'],
headstage_smart_ref_on=fl_header_device.global_configuration['headstage_smart_ref_on'],
realtime_mode=fl_header_device.global_configuration['realtime_mode'],
- headstage_auto_settle_on=fl_header_device.global_configuration['headstage_auto_settle_on'],
+ headstage_auto_settle_on=fl_header_device.global_configuration[
+ 'headstage_auto_settle_on'],
timestamp_at_creation=fl_header_device.global_configuration['timestamp_at_creation'],
- controller_firmware_version=fl_header_device.global_configuration['controller_firmware_version'],
+ controller_firmware_version=fl_header_device.global_configuration[
+ 'controller_firmware_version'],
controller_serial=fl_header_device.global_configuration['controller_serial'],
- save_displayed_chan_only=fl_header_device.global_configuration['save_displayed_chan_only'],
- headstage_firmware_version=fl_header_device.global_configuration['headstage_firmware_version'],
+ save_displayed_chan_only=fl_header_device.global_configuration[
+ 'save_displayed_chan_only'],
+ headstage_firmware_version=fl_header_device.global_configuration[
+ 'headstage_firmware_version'],
qt_version=fl_header_device.global_configuration['qt_version'],
compile_date=fl_header_device.global_configuration['compile_date'],
compile_time=fl_header_device.global_configuration['compile_time'],
file_prefix=fl_header_device.global_configuration['file_prefix'],
- headstage_gyro_sensor_on=fl_header_device.global_configuration['headstage_gyro_sensor_on'],
- headstage_mag_sensor_on=fl_header_device.global_configuration['headstage_mag_sensor_on'],
+ headstage_gyro_sensor_on=fl_header_device.global_configuration[
+ 'headstage_gyro_sensor_on'],
+ headstage_mag_sensor_on=fl_header_device.global_configuration[
+ 'headstage_mag_sensor_on'],
trodes_version=fl_header_device.global_configuration['trodes_version'],
- headstage_accel_sensor_on=fl_header_device.global_configuration['headstage_accel_sensor_on'],
+ headstage_accel_sensor_on=fl_header_device.global_configuration[
+ 'headstage_accel_sensor_on'],
commit_head=fl_header_device.global_configuration['commit_head'],
- system_time_at_creation=fl_header_device.global_configuration['system_time_at_creation'],
+ system_time_at_creation=fl_header_device.global_configuration[
+ 'system_time_at_creation'],
file_path=fl_header_device.global_configuration['file_path']
)
diff --git a/rec_to_nwb/processing/nwb/components/device/device_injector.py b/rec_to_nwb/processing/nwb/components/device/device_injector.py
index ec5775aaf..a08eb7efe 100644
--- a/rec_to_nwb/processing/nwb/components/device/device_injector.py
+++ b/rec_to_nwb/processing/nwb/components/device/device_injector.py
@@ -1,5 +1,4 @@
from pynwb import NWBFile
-
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -12,4 +11,4 @@ def inject_all_devices(self, nwb_content: NWBFile, devices: list):
@staticmethod
def __inject_device(nwb_content, device):
- nwb_content.add_device(device)
\ No newline at end of file
+ nwb_content.add_device(device)
diff --git a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device.py b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device.py
index e551e6af3..154457ce3 100644
--- a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device.py
+++ b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device.py
@@ -2,5 +2,7 @@ class FlHeaderDevice:
def __init__(self, name, global_configuration_dict):
self.name = name
+ for parameter in global_configuration_dict:
+ if global_configuration_dict[parameter] is None:
+ global_configuration_dict[parameter] = ''
self.global_configuration = global_configuration_dict
-
diff --git a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_builder.py b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_builder.py
index 5b0e947ff..fb18f0f71 100644
--- a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_builder.py
+++ b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_builder.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.device.header.fl_header_device import FlHeaderDevice
+from rec_to_nwb.processing.nwb.components.device.header.fl_header_device import \
+ FlHeaderDevice
class FlHeaderDeviceBuilder:
diff --git a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_manager.py b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_manager.py
index 63a17a581..a25d9e1d1 100644
--- a/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_manager.py
+++ b/rec_to_nwb/processing/nwb/components/device/header/fl_header_device_manager.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.device.header.fl_header_device_builder import FlHeaderDeviceBuilder
+from rec_to_nwb.processing.nwb.components.device.header.fl_header_device_builder import \
+ FlHeaderDeviceBuilder
class FlHeaderDeviceManager:
@@ -13,6 +14,6 @@ def get_fl_header_device(self):
def __compare_global_configuration_with_default(self):
for single_key in self.default_configuration:
- if single_key not in self.global_configuration.keys():
+ if single_key not in self.global_configuration.keys() or self.global_configuration[single_key] is None:
self.global_configuration[single_key] = self.default_configuration[single_key]
return self.global_configuration
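
The extended check now also repairs keys that are present but set to None. A standalone illustration with invented values:

```python
default_configuration = {'qt_version': '0', 'commit_head': 'unknown'}
global_configuration = {'qt_version': None}  # present but empty in the header

for key in default_configuration:
    if key not in global_configuration or global_configuration[key] is None:
        global_configuration[key] = default_configuration[key]

print(global_configuration)  # {'qt_version': '0', 'commit_head': 'unknown'}
```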
diff --git a/rec_to_nwb/processing/nwb/components/device/probe/shanks/shank_creator.py b/rec_to_nwb/processing/nwb/components/device/probe/shanks/shank_creator.py
index 241826fa5..e3541a80a 100644
--- a/rec_to_nwb/processing/nwb/components/device/probe/shanks/shank_creator.py
+++ b/rec_to_nwb/processing/nwb/components/device/probe/shanks/shank_creator.py
@@ -1,4 +1,4 @@
-from ndx_franklab_novela.probe import Shank
+from ndx_franklab_novela import Shank
from rec_to_nwb.processing.nwb.components.device.probe.shanks.fl_shank import FlShank
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/device/probe/shanks_electrodes/shanks_electrode_creator.py b/rec_to_nwb/processing/nwb/components/device/probe/shanks_electrodes/shanks_electrode_creator.py
index 3182882fc..a204ab04c 100644
--- a/rec_to_nwb/processing/nwb/components/device/probe/shanks_electrodes/shanks_electrode_creator.py
+++ b/rec_to_nwb/processing/nwb/components/device/probe/shanks_electrodes/shanks_electrode_creator.py
@@ -1,4 +1,4 @@
-from ndx_franklab_novela.probe import ShanksElectrode
+from ndx_franklab_novela import ShanksElectrode
from rec_to_nwb.processing.nwb.components.device.probe.shanks_electrodes.fl_shanks_electrode import FlShanksElectrode
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -18,4 +18,4 @@ def create(cls, fl_shanks_electrode: FlShanksElectrode) -> ShanksElectrode:
rel_x=float(fl_shanks_electrode.rel_x),
rel_y=float(fl_shanks_electrode.rel_y),
rel_z=float(fl_shanks_electrode.rel_z),
- )
\ No newline at end of file
+ )
diff --git a/rec_to_nwb/processing/nwb/components/dio/dio_extractor.py b/rec_to_nwb/processing/nwb/components/dio/dio_extractor.py
index 40cb42d4b..c4e85e48b 100644
--- a/rec_to_nwb/processing/nwb/components/dio/dio_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/dio/dio_extractor.py
@@ -2,44 +2,65 @@
import os
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.time.continuous_time_extractor import ContinuousTimeExtractor
+from rec_to_nwb.processing.time.continuous_time_extractor import \
+ ContinuousTimeExtractor
from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter
+
+
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class DioExtractor:
@staticmethod
- def extract_dio_for_single_dataset(filtered_files, continuous_time_file):
+ def extract_dio_for_single_dataset(filtered_files, continuous_time_file,
+ convert_timestamps=True):
single_dataset_data = {}
- continuous_time_dict = ContinuousTimeExtractor.get_continuous_time_dict_file(continuous_time_file)
- for dio_file in filtered_files:
+ continuous_time = ContinuousTimeExtractor.get_continuous_time_array_file(
+ continuous_time_file)
+
+ for dio_sensor in filtered_files:
try:
- dio_data = readTrodesExtractedDataFile(filtered_files[dio_file])
- keys, values = DioExtractor.__get_dio_time_series(dio_data, continuous_time_dict)
- single_dataset_data[dio_file] = ([keys, values])
+ dio_data = readTrodesExtractedDataFile(
+ filtered_files[dio_sensor])
+ # dio_data['data'] is a labeled array with 'time' and 'state' columns. 'time' corresponds to sample count
+ single_dataset_data[dio_sensor] = DioExtractor.__get_dio_time_series(
+ dio_data, continuous_time, convert_timestamps)
except KeyError as error:
- message = "there is no " + str(dio_file) + ", error: "
+ message = "there is no " + str(dio_sensor) + ", error: "
logger.exception(message + str(error))
except TypeError as error:
- message = "there is no data for event " + str(dio_file) + ", error: "
+ message = "there is no data for event " + \
+ str(dio_sensor) + ", error: "
logger.exception(message + str(error))
return single_dataset_data
@staticmethod
- def __get_dio_time_series(dio_data, continuoues_time_dict):
-
- values = [bool(recorded_event[1]) for recorded_event in dio_data['data']]
- keys = [recorded_event[0] for recorded_event in dio_data['data']]
- keys = DioExtractor.__convert_keys(continuoues_time_dict, keys)
- return keys, values
+ def __get_dio_time_series(dio_data, continuous_time, convert_timestamps=True):
+ dio_state = dio_data['data']['state']
+ time_counts = dio_data['data']['time'] # time sample counts
+ if not convert_timestamps:
+ return [time_counts, dio_state]
+ converted_timestamps = TimestampConverter.convert_timestamps(
+ continuous_time, time_counts)
+ return [converted_timestamps, dio_state]
- @staticmethod
- def __convert_keys(continuous_time_dict, keys):
- converted_timestamps = TimestampConverter.convert_timestamps(continuous_time_dict, keys)
- return converted_timestamps
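
A toy view of the new DIO path, assuming `readTrodesExtractedDataFile` returns a dict whose 'data' field is a structured array with 'time' and 'state' fields:

```python
import numpy as np

dio_data = {'data': np.array([(10, 0), (11, 1), (12, 0)],
                             dtype=[('time', '<u4'), ('state', 'u1')])}
time_counts = dio_data['data']['time']  # Trodes sample counts
dio_state = dio_data['data']['state']   # pin level at each transition
# With convert_timestamps=False the extractor returns the raw counts:
print([time_counts, dio_state])
```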
diff --git a/rec_to_nwb/processing/nwb/components/dio/dio_files.py b/rec_to_nwb/processing/nwb/components/dio/dio_files.py
index bfa1c02c7..ad5bfc51f 100644
--- a/rec_to_nwb/processing/nwb/components/dio/dio_files.py
+++ b/rec_to_nwb/processing/nwb/components/dio/dio_files.py
@@ -1,30 +1,53 @@
+import glob
import os
class DioFiles:
-
def __init__(self, directories, dio_metadata):
self.directories = directories
self.dio_metadata = dio_metadata
def get_files(self):
- multiple_datasets_dio_files = [self.__get_dict(dataset) for dataset in self.directories]
- filtered_datasets_dio_files = self.__filter_files(multiple_datasets_dio_files, self.dio_metadata)
+ multiple_datasets_dio_files = [
+ self.__get_dict(dataset) for dataset in self.directories
+ ]
+ filtered_datasets_dio_files = self.__filter_files(
+ multiple_datasets_dio_files, self.dio_metadata
+ )
return filtered_datasets_dio_files
@classmethod
def __filter_files(cls, multiple_datasets_dio_files, dio_metadata):
- return [{dio_file: single_dataset[dio_file] for dio_file in single_dataset
- if dio_file in [dio_event['description'] for dio_event in dio_metadata]}
- for single_dataset in multiple_datasets_dio_files]
+ return [
+ {
+ dio_file: single_dataset[dio_file]
+ for dio_file in single_dataset
+ if dio_file in [dio_event["description"] for dio_event in dio_metadata]
+ }
+ for single_dataset in multiple_datasets_dio_files
+ ]
@classmethod
def __get_dict(cls, directory):
dio_dict = {}
- files = os.listdir(directory)
- files.sort()
- for file in files:
- if file.endswith('.dat'):
- split_filename = file.split('.')
- dio_dict[split_filename[-2].split('_')[1]] = directory + '/' + file
+ for file in glob.glob(os.path.join(directory, "*.dat")):
+ if file.split(".")[-2].split("_")[-2] == "MCU":
+                # To avoid this warning, remove MCU_IO data from the
+                # .trodesconf display settings; that stops MCU_IO extraction.
+ print(
+ f"WARNING: MCU_IO data are not currently handled by rec_to_nwb. Skipping file: {file}."
+ )
+                # TODO: find MCU_IO binaries if they exist and insert these data into NWB files in a future version of rec_to_nwb
+ elif file.split(".")[-2].split("_")[-2] == "Controller":
+ print(
+ f"WARNING: Controller_IO data are not currently handled by rec_to_nwb. Skipping file: {file}."
+ )
+ else:
+                if file.split(".")[-2].split("_")[-2] != "ECU":
+                    print(
+                        f"WARNING: {file.split('.')[-2].split('_')[-2]} is not a recognized DIO type. Including file: {file}, but proceed with caution."
+                    )
+                # This string should be of the form "Din12" or "Dout5"
+                dio_name = file.split(".")[-2].split("_")[-1]
+ dio_dict[dio_name] = os.path.join(directory, file)
return dio_dict
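
The filename parsing above can be traced with an invented name: the second-to-last underscore token selects the source device, the last one the DIO channel key.

```python
file = "/data/beans20190718.dio_ECU_Din12.dat"  # hypothetical path
stem = file.split(".")[-2]      # 'dio_ECU_Din12'
device = stem.split("_")[-2]    # 'ECU'   -> recognized source
dio_name = stem.split("_")[-1]  # 'Din12' -> key used in dio_dict
print(device, dio_name)
```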
diff --git a/rec_to_nwb/processing/nwb/components/dio/dio_injector.py b/rec_to_nwb/processing/nwb/components/dio/dio_injector.py
index 2e6ffcb02..cdb3e6952 100644
--- a/rec_to_nwb/processing/nwb/components/dio/dio_injector.py
+++ b/rec_to_nwb/processing/nwb/components/dio/dio_injector.py
@@ -6,4 +6,5 @@ def __init__(self, nwb_content):
def inject(self, behavioral_events, processing_module_name):
"""insert behavioral events to specified processing module in nwb file"""
- self.nwb_content.processing[processing_module_name].add(behavioral_events)
+ self.nwb_content.processing[processing_module_name].add(
+ behavioral_events)
diff --git a/rec_to_nwb/processing/nwb/components/dio/dio_manager.py b/rec_to_nwb/processing/nwb/components/dio/dio_manager.py
index 38321b8e0..f974af7e2 100644
--- a/rec_to_nwb/processing/nwb/components/dio/dio_manager.py
+++ b/rec_to_nwb/processing/nwb/components/dio/dio_manager.py
@@ -1,14 +1,14 @@
import numpy as np
-
from rec_to_nwb.processing.nwb.components.dio.dio_extractor import DioExtractor
class DioManager:
- def __init__(self, dio_files, dio_metadata, continuous_time_files):
+ def __init__(self, dio_files, dio_metadata, continuous_time_files, convert_timestamps=True):
self.dio_files = dio_files
self.dio_metadata = dio_metadata
self.continuous_time_files = continuous_time_files
+ self.convert_timestamps = convert_timestamps
def get_dio(self):
""""extract data from DIO files and match them with metadata"""
@@ -19,7 +19,8 @@ def get_dio(self):
all_dio_data.append(
DioExtractor.extract_dio_for_single_dataset(
filtered_files=self.dio_files[i],
- continuous_time_file=self.continuous_time_files[i]
+ continuous_time_file=self.continuous_time_files[i],
+ convert_timestamps=self.convert_timestamps
)
)
return self.__merge_dio_data(all_dio_data)
@@ -29,7 +30,9 @@ def __merge_dio_data(cls, data_from_multiple_datasets):
merged_data = data_from_multiple_datasets[0]
for single_dataset_data in data_from_multiple_datasets[1:]:
for event, timeseries in single_dataset_data.items():
- merged_data[event][0] = np.hstack((merged_data[event][0], timeseries[0]))
- merged_data[event][1].extend(timeseries[1])
+ merged_data[event][0] = np.hstack(
+ (merged_data[event][0], timeseries[0]))
+ merged_data[event][1] = np.hstack(
+ (merged_data[event][1], timeseries[1]))
return merged_data
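
Since both halves of each time series are now numpy arrays, the merge concatenates both with `np.hstack` (previously the state list was extended in place). Toy data:

```python
import numpy as np

epoch1 = {'Din1': [np.array([0.10, 0.20]), np.array([0, 1])]}
epoch2 = {'Din1': [np.array([5.10, 5.30]), np.array([1, 0])]}
merged = epoch1
for event, timeseries in epoch2.items():
    merged[event][0] = np.hstack((merged[event][0], timeseries[0]))
    merged[event][1] = np.hstack((merged[event][1], timeseries[1]))
print(merged['Din1'][0])  # [0.1 0.2 5.1 5.3]
```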
diff --git a/rec_to_nwb/processing/nwb/components/dio/old_dio_extractor.py b/rec_to_nwb/processing/nwb/components/dio/old_dio_extractor.py
deleted file mode 100644
index 3a4f1f71e..000000000
--- a/rec_to_nwb/processing/nwb/components/dio/old_dio_extractor.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import logging.config
-import os
-
-from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.time.continuous_time_extractor import ContinuousTimeExtractor
-from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter
-
-path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldDioExtractor:
-
- @staticmethod
- def extract_dio_for_single_dataset(filtered_files):
- single_dataset_data = {}
- for dio_file in filtered_files:
- try:
- dio_data = readTrodesExtractedDataFile(filtered_files[dio_file])
- keys, values = OldDioExtractor.__get_dio_time_series(dio_data)
- single_dataset_data[dio_file] = ([keys, values])
-
- except KeyError as error:
- message = "there is no " + str(dio_file) + ", error: "
- logger.exception(message + str(error))
- except TypeError as error:
- message = "there is no data for event " + str(dio_file) + ", error: "
- logger.exception(message + str(error))
- return single_dataset_data
-
- @staticmethod
- def __get_dio_time_series(dio_data):
-
- values = [bool(recorded_event[1]) for recorded_event in dio_data['data']]
- keys = [recorded_event[0] for recorded_event in dio_data['data']]
- return keys, values
-
diff --git a/rec_to_nwb/processing/nwb/components/dio/old_dio_manager.py b/rec_to_nwb/processing/nwb/components/dio/old_dio_manager.py
deleted file mode 100644
index 5ae4cc123..000000000
--- a/rec_to_nwb/processing/nwb/components/dio/old_dio_manager.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import numpy as np
-
-from rec_to_nwb.processing.nwb.components.dio.old_dio_extractor import OldDioExtractor
-
-
-class OldDioManager:
-
- def __init__(self, dio_files, dio_metadata):
- self.dio_files = dio_files
- self.dio_metadata = dio_metadata
-
- def get_dio(self):
- """"extract data from DIO files and match them with metadata"""
-
- all_dio_data = []
- number_of_datasets = len(self.dio_files)
- for i in range(number_of_datasets):
- all_dio_data.append(
- OldDioExtractor.extract_dio_for_single_dataset(
- filtered_files=self.dio_files[i]
- )
- )
- return self.__merge_dio_data(all_dio_data)
-
- @classmethod
- def __merge_dio_data(cls, data_from_multiple_datasets):
- merged_data = data_from_multiple_datasets[0]
- for single_dataset_data in data_from_multiple_datasets[1:]:
- for event, timeseries in single_dataset_data.items():
- merged_data[event][0] = np.hstack((merged_data[event][0], timeseries[0]))
- merged_data[event][1].extend(timeseries[1])
-
- return merged_data
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_factory.py b/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_factory.py
index 51886e0fc..61ba4a695 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_factory.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_factory.py
@@ -1,10 +1,12 @@
-from ndx_franklab_novela.nwb_electrode_group import NwbElectrodeGroup
+from ndx_franklab_novela import NwbElectrodeGroup
from pynwb.ecephys import ElectrodeGroup
-
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group import FlElectrodeGroup
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group import FlNwbElectrodeGroup
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group import \
+ FlElectrodeGroup
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group import \
+ FlNwbElectrodeGroup
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class ElectrodeGroupFactory:
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_injector.py b/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_injector.py
index ee6fa479a..a4513ec1f 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_injector.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/electrode_group_injector.py
@@ -1,5 +1,4 @@
from pynwb import NWBFile
-
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group.py b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group.py
index 206c01307..c779cd467 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group.py
@@ -4,4 +4,4 @@ def __init__(self, name, description, location, device):
self.name = name
self.description = description
self.location = location
- self.device = device
\ No newline at end of file
+ self.device = device
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_builder.py b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_builder.py
index 741bae989..a6ad91b8a 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_builder.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_builder.py
@@ -1,6 +1,6 @@
from pynwb.device import Device
-
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group import FlElectrodeGroup
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group import \
+ FlElectrodeGroup
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_manager.py b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_manager.py
index 17e8c676c..625aa9287 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_manager.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/fl_electrode_group_manager.py
@@ -1,6 +1,6 @@
-from ndx_franklab_novela.probe import Probe
-
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group_builder import FlElectrodeGroupBuilder
+from ndx_franklab_novela import Probe
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group_builder import \
+ FlElectrodeGroupBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -15,7 +15,8 @@ def get_fl_electrode_groups(self, probes: list, electrode_groups_valid_map: set)
fl_electrode_groups = []
for electrode_group_metadata in self.electrode_groups_metadata:
if electrode_group_metadata['id'] in electrode_groups_valid_map:
- probe = self.__get_probe_by_type(probes, electrode_group_metadata['device_type'])
+ probe = self.__get_probe_by_type(
+ probes, electrode_group_metadata['device_type'])
fl_electrode_groups.append(
FlElectrodeGroupBuilder.build(
metadata=electrode_group_metadata,
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_builder.py b/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_builder.py
index 818ba9dfb..634bf5a85 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_builder.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_builder.py
@@ -1,6 +1,6 @@
from pynwb.device import Device
-
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group import FlNwbElectrodeGroup
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group import \
+ FlNwbElectrodeGroup
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_manager.py b/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_manager.py
index d2b7c1a9e..1ca021554 100644
--- a/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_manager.py
+++ b/rec_to_nwb/processing/nwb/components/electrode_group/fl_nwb_electrode_group_manager.py
@@ -1,6 +1,6 @@
-from ndx_franklab_novela.probe import Probe
-
-from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group_builder import FlNwbElectrodeGroupBuilder
+from ndx_franklab_novela import Probe
+from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group_builder import \
+ FlNwbElectrodeGroupBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -32,7 +32,8 @@ def get_fl_nwb_electrode_groups(self, probes: list, electrode_groups_valid_map:
fl_nwb_electrode_groups = []
for electrode_group_metadata in self.electrode_groups_metadata:
if electrode_group_metadata['id'] in electrode_groups_valid_map:
- probe = self.__get_probe_by_type(probes, electrode_group_metadata['device_type'])
+ probe = self.__get_probe_by_type(
+ probes, electrode_group_metadata['device_type'])
fl_nwb_electrode_groups.append(
FlNwbElectrodeGroupBuilder.build(
metadata=electrode_group_metadata,
diff --git a/rec_to_nwb/processing/nwb/components/electrodes/electrode_creator.py b/rec_to_nwb/processing/nwb/components/electrodes/electrode_creator.py
index 00edaea58..5035cb87d 100644
--- a/rec_to_nwb/processing/nwb/components/electrodes/electrode_creator.py
+++ b/rec_to_nwb/processing/nwb/components/electrodes/electrode_creator.py
@@ -1,8 +1,9 @@
from pynwb import NWBFile
-
-from rec_to_nwb.processing.nwb.components.electrodes.fl_electrodes import FlElectrode
+from rec_to_nwb.processing.nwb.components.electrodes.fl_electrodes import \
+ FlElectrode
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class ElectrodesCreator:
@@ -10,7 +11,8 @@ class ElectrodesCreator:
@classmethod
@beartype
def create(cls, nwb_content: NWBFile, fl_electrode: FlElectrode):
- validate_parameters_not_none(__name__, fl_electrode.electrode_group, fl_electrode.electrode_id)
+ validate_parameters_not_none(
+ __name__, fl_electrode.electrode_group, fl_electrode.electrode_id)
nwb_content.add_electrode(
x=0.0,
@@ -22,4 +24,3 @@ def create(cls, nwb_content: NWBFile, fl_electrode: FlElectrode):
group=fl_electrode.electrode_group,
id=fl_electrode.electrode_id
)
-
diff --git a/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_factory.py b/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_factory.py
index c95129072..37ed16b2d 100644
--- a/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_factory.py
+++ b/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_factory.py
@@ -40,7 +40,7 @@ def create_bad_channels(cls, ntrode_metadata: list) -> list:
bad_channels = []
for ntrode in ntrode_metadata:
bad_channels.extend(
- [bool(counter in ntrode['bad_channels']) for counter, _ in enumerate(ntrode['map'])]
+ [bool(channel in ntrode['bad_channels']) for channel in ntrode['map']]
)
return bad_channels
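
A toy contrast of the two membership tests, assuming `ntrode['map']` is keyed by channel number; the enumerate counter only agreed with the channel numbers when the keys happened to start at 0.

```python
ntrode = {'map': {4: 0, 5: 1}, 'bad_channels': [5]}  # invented values
old = [bool(counter in ntrode['bad_channels'])
       for counter, _ in enumerate(ntrode['map'])]
new = [bool(channel in ntrode['bad_channels']) for channel in ntrode['map']]
print(old, new)  # [False, False] [False, True]
```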
@@ -72,13 +72,25 @@ def create_probe_electrode(cls, probes_metadata: list, electrode_groups_metadata
@classmethod
def create_ref_elect_id(cls, spike_n_trodes: list, ntrode_metadata: dict):
+ # create a list of ntrode_ids, channels, and their indices
+ ntrode_elect_id = dict()
+ elect_id = 0
+ for ntrode in ntrode_metadata:
+ ntrode_id = ntrode['ntrode_id']
+ ntrode_elect_id[ntrode_id] = dict()
+ for chan in ntrode["map"]:
+                # Adjust for the 1-based channel numbers in the rec file header:
+                # ntrode["map"] is 0-based, so add 1 to get the index that
+                # corresponds to spike_n_trode.ref_chan below.
+                ntrode_elect_id[ntrode_id][int(chan) + 1] = elect_id
+                elect_id += 1
+
ref_elect_id = []
for spike_n_trode in spike_n_trodes:
- if not int(spike_n_trode.ref_n_trode_id) == 0:
+ if spike_n_trode.ref_n_trode_id:
for ntrode in ntrode_metadata:
if int(ntrode["ntrode_id"]) == int(spike_n_trode.ref_n_trode_id):
ref_elect_id.extend(
- [ntrode["map"][spike_n_trode.ref_chan]]
+ [ntrode_elect_id[spike_n_trode.ref_n_trode_id][int(spike_n_trode.ref_chan)]]
* len(spike_n_trode.spike_channels)
)
else:
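
A toy walk-through of the new reference-electrode lookup (metadata values invented): electrode ids are assigned in map order across ntrodes, and the 1-based `ref_chan` indexes the shifted inner dict.

```python
ntrode_metadata = [
    {'ntrode_id': 1, 'map': {0: 0, 1: 1}},
    {'ntrode_id': 2, 'map': {0: 2, 1: 3}},
]
ntrode_elect_id, elect_id = {}, 0
for ntrode in ntrode_metadata:
    ntrode_elect_id[ntrode['ntrode_id']] = {}
    for chan in ntrode['map']:
        ntrode_elect_id[ntrode['ntrode_id']][int(chan) + 1] = elect_id
        elect_id += 1
# An ntrode referencing ntrode 2, channel 1 (1-based) resolves to electrode 2:
print(ntrode_elect_id[2][1])  # 2
```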
diff --git a/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_manager.py b/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_manager.py
index ffb8707ce..0b2eb36d7 100644
--- a/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_manager.py
+++ b/rec_to_nwb/processing/nwb/components/electrodes/extension/fl_electrode_extension_manager.py
@@ -28,8 +28,8 @@ def __init__(self, probes_metadata: list, metadata: dict, header: Header):
@beartype
def get_fl_electrodes_extension(self, electrodes_valid_map: list) -> FlElectrodeExtension:
probes_metadata = self.probes_metadata
- electrode_groups_metadata = self.metadata['electrode groups']
- ntrode_metadata = self.metadata['ntrode electrode group channel map']
+ electrode_groups_metadata = self.metadata['electrode_groups']
+ ntrode_metadata = self.metadata['ntrode_electrode_group_channel_map']
spike_n_trodes = self.header.configuration.spike_configuration.spike_n_trodes
rel = FlElectrodeExtensionFactory.create_rel(
@@ -97,5 +97,6 @@ def __validate_extension_length(*args):
@staticmethod
def __filter_extension_list_with_electrodes_valid_map(electrodes_valid_map, extension):
+        # This function no longer filters anything, so it could be removed.
         tmp_electrodes_valid_map = copy.deepcopy(electrodes_valid_map)
-        return [value for value in extension if tmp_electrodes_valid_map.pop(0)]
+        return list(extension)
diff --git a/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_builder.py b/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_builder.py
index aa73d2dc0..574c8c3b8 100644
--- a/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_builder.py
+++ b/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_builder.py
@@ -1,6 +1,6 @@
from pynwb.ecephys import ElectrodeGroup
-
-from rec_to_nwb.processing.nwb.components.electrodes.fl_electrodes import FlElectrode
+from rec_to_nwb.processing.nwb.components.electrodes.fl_electrodes import \
+ FlElectrode
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -9,4 +9,4 @@ class FlElectrodesBuilder:
@staticmethod
@beartype
def build(electrode_id: int, electrode_group: ElectrodeGroup):
- return FlElectrode(electrode_id, electrode_group)
\ No newline at end of file
+ return FlElectrode(electrode_id, electrode_group)
diff --git a/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_manager.py b/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_manager.py
index 213d207d2..eece44ab7 100644
--- a/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_manager.py
+++ b/rec_to_nwb/processing/nwb/components/electrodes/fl_electrode_manager.py
@@ -1,9 +1,12 @@
import copy
-from rec_to_nwb.processing.nwb.components.electrodes.fl_electrode_builder import FlElectrodesBuilder
+from rec_to_nwb.processing.nwb.components.electrodes.fl_electrode_builder import \
+ FlElectrodesBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.filter_probe_by_type import filter_probe_by_type
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.filter_probe_by_type import \
+ filter_probe_by_type
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class FlElectrodeManager:
@@ -23,21 +26,19 @@ def get_fl_electrodes(self, electrode_groups: list, electrodes_valid_map: list,
fl_electrodes = []
fl_electrode_id = -1
for electrode_group_metadata in self.electrode_groups_metadata:
- probe_metadata = filter_probe_by_type(self.probes_metadata, electrode_group_metadata['device_type'])
+ probe_metadata = filter_probe_by_type(
+ self.probes_metadata, electrode_group_metadata['device_type'])
for shank in probe_metadata['shanks']:
for _ in shank['electrodes']:
fl_electrode_id += 1
-
- if tmp_electrodes_valid_map.pop(0) and \
- (electrode_group_metadata['id'] in electrode_groups_valid_map):
-
- fl_electrodes.append(
- self.fl_electrodes_builder.build(
- fl_electrode_id,
- self.__get_electrode_group(electrode_group_metadata, electrode_groups)
- )
+ fl_electrodes.append(
+ self.fl_electrodes_builder.build(
+ fl_electrode_id,
+ self.__get_electrode_group(
+ electrode_group_metadata, electrode_groups)
)
+ )
return fl_electrodes
@staticmethod
@@ -50,4 +51,5 @@ def __get_electrode_group(electrode_group_metadata, electrode_groups):
@staticmethod
@beartype
def __validate_parameters(electrode_groups: list):
- [validate_parameters_not_none(__name__, electrode_group.name) for electrode_group in electrode_groups]
+ [validate_parameters_not_none(__name__, electrode_group.name)
+ for electrode_group in electrode_groups]
diff --git a/rec_to_nwb/processing/nwb/components/epochs/epochs_tag_extractor.py b/rec_to_nwb/processing/nwb/components/epochs/epochs_tag_extractor.py
index ae9c2e391..62b33c6c0 100644
--- a/rec_to_nwb/processing/nwb/components/epochs/epochs_tag_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/epochs/epochs_tag_extractor.py
@@ -4,4 +4,4 @@ def __init__(self, datasets):
self.datasets = datasets
def get_tags(self):
- return [dataset.name for dataset in self.datasets]
\ No newline at end of file
+ return [dataset.name for dataset in self.datasets]
diff --git a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs.py b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs.py
index cf40d0079..05ea1e02c 100644
--- a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs.py
+++ b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs.py
@@ -1,10 +1,12 @@
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_equal_length
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_equal_length
class FlEpochs:
def __init__(self, session_start_times, session_end_times, tags):
- validate_parameters_equal_length(__name__, session_start_times, session_end_times, tags)
+ validate_parameters_equal_length(
+ __name__, session_start_times, session_end_times, tags)
self.session_start_times = session_start_times
self.session_end_times = session_end_times
diff --git a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_extractor.py b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_extractor.py
index cac5caa5c..8fc294af3 100644
--- a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_extractor.py
@@ -1,5 +1,7 @@
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
+NANOSECONDS_PER_SECOND = 1E9
+
class FlEpochsExtractor:
@@ -10,9 +12,14 @@ def extract_epochs(self):
session_start_times = []
session_end_times = []
for continuous_time_file in self.continuous_time_files:
- continuous_time_data = self.__read_contunious_time_file(continuous_time_file)
- session_start_times.append(float(continuous_time_data['data'][0][1]) / 1E9)
- session_end_times.append(float(continuous_time_data['data'][-1][1]) / 1E9)
+ continuous_time_data = self.__read_contunious_time_file(
+ continuous_time_file)
+ session_start_times.append(
+ float(continuous_time_data['data'][0][1]) /
+ NANOSECONDS_PER_SECOND)
+ session_end_times.append(
+ float(continuous_time_data['data'][-1][1]) /
+ NANOSECONDS_PER_SECOND)
return session_start_times, session_end_times
def __read_contunious_time_file(self, continuous_time_file):
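
The named constant makes the unit conversion explicit; each continuous-time 'data' row is assumed to pair a Trodes sample count with an adjusted system time in nanoseconds (values below invented).

```python
NANOSECONDS_PER_SECOND = 1E9

first_row = (25780543, 1563479032000000000)
session_start_time = float(first_row[1]) / NANOSECONDS_PER_SECOND
print(session_start_time)  # 1563479032.0, i.e. seconds since the Unix epoch
```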
diff --git a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_manager.py b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_manager.py
index 7b7420aec..8496cdeaa 100644
--- a/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_manager.py
+++ b/rec_to_nwb/processing/nwb/components/epochs/fl_epochs_manager.py
@@ -1,6 +1,9 @@
-from rec_to_nwb.processing.nwb.components.epochs.epochs_tag_extractor import EpochsTagExtractor
-from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_builder import FlEpochsBuilder
-from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_extractor import FlEpochsExtractor
+from rec_to_nwb.processing.nwb.components.epochs.epochs_tag_extractor import \
+ EpochsTagExtractor
+from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_builder import \
+ FlEpochsBuilder
+from rec_to_nwb.processing.nwb.components.epochs.fl_epochs_extractor import \
+ FlEpochsExtractor
from rec_to_nwb.processing.tools.beartype.beartype import beartype
@@ -8,7 +11,8 @@ class FlEpochsManager:
@beartype
def __init__(self, datasets: list):
- self.continuous_time_files = [dataset.get_continuous_time() for dataset in datasets]
+ self.continuous_time_files = [
+ dataset.get_continuous_time() for dataset in datasets]
epochs_tags = self.__get_epochs_tags(datasets)
self.fl_epochs_builder = FlEpochsBuilder(epochs_tags)
diff --git a/rec_to_nwb/processing/nwb/components/iterator/data_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/data_iterator.py
index 07bb21b23..574920780 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/data_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/data_iterator.py
@@ -10,27 +10,47 @@ def __init__(self, data):
self.current_file = 0
self.current_dataset = 0
- self.number_of_steps = self.data.get_number_of_datasets() * self.data.get_number_of_files_per_dataset()
- self.dataset_file_length = self.data.get_file_lenghts_in_datasets()
+ self.number_of_datasets = self.data.get_number_of_datasets()
+ self.number_of_steps = self.number_of_datasets * \
+ self.data.get_number_of_files_per_dataset()
+ self.dataset_file_length = self.data.get_file_lengths_in_datasets()
self.number_of_rows = self.data.get_number_of_rows_per_file()
self.number_of_files_in_single_dataset = self.data.get_number_of_files_per_dataset()
- self.shape = [self.data.get_final_data_shape()[1], self.data.get_final_data_shape()[0]]
+        final_data_shape = self.data.get_final_data_shape()
+        self.shape = [final_data_shape[1], final_data_shape[0]]
def __iter__(self):
return self
def _get_selection(self):
+ if isinstance(self.number_of_rows, int):
+ # single number (legacy behavior)
+ start_index = (self.current_file * self.number_of_rows)
+ stop_index = ((self.current_file + 1) * self.number_of_rows)
+ else:
+ # expecting a list (different number_of_rows for each file)
+ start_index = sum(self.number_of_rows[0:self.current_file])
+ stop_index = sum(self.number_of_rows[0:(self.current_file + 1)])
return np.s_[sum(self.dataset_file_length[0:self.current_dataset]):
sum(self.dataset_file_length[0:self.current_dataset + 1]),
- (self.current_file * self.number_of_rows):
- ((self.current_file + 1) * self.number_of_rows)]
+ start_index:
+ stop_index]
@staticmethod
def get_selection(number_of_threads, current_dataset, dataset_file_length, current_file, number_of_rows):
+ if isinstance(number_of_rows, int):
+ # single number (legacy behavior)
+ start_index = (current_file * number_of_rows)
+ stop_index = ((current_file + number_of_threads) * number_of_rows)
+ else:
+ # expecting a list (different number_of_rows for each file)
+ start_index = sum(number_of_rows[0:current_file])
+ stop_index = sum(
+ number_of_rows[0:(current_file + number_of_threads)])
return np.s_[sum(dataset_file_length[0:current_dataset]):
sum(dataset_file_length[0:current_dataset + 1]),
- (current_file * number_of_rows):
- ((current_file + number_of_threads) * number_of_rows)]
+ start_index:
+ stop_index]
def recommended_chunk_shape(self):
return None
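
The new list branch supports files that contribute different numbers of rows. A toy selection computation (sizes invented):

```python
import numpy as np

dataset_file_length = [100, 150]  # total rows per dataset (epoch)
number_of_rows = [32, 32, 64]     # rows contributed by each file
current_dataset, current_file = 1, 2
start_index = sum(number_of_rows[0:current_file])     # 64
stop_index = sum(number_of_rows[0:current_file + 1])  # 128
selection = np.s_[sum(dataset_file_length[0:current_dataset]):
                  sum(dataset_file_length[0:current_dataset + 1]),
                  start_index:stop_index]
print(selection)  # (slice(100, 250, None), slice(64, 128, None))
```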
diff --git a/rec_to_nwb/processing/nwb/components/iterator/multi_thread_data_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/multi_thread_data_iterator.py
index 39a3599e4..f7f33c18b 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/multi_thread_data_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/multi_thread_data_iterator.py
@@ -2,8 +2,8 @@
import numpy as np
from hdmf.data_utils import DataChunk
-
-from rec_to_nwb.processing.nwb.components.iterator.data_iterator import DataIterator
+from rec_to_nwb.processing.nwb.components.iterator.data_iterator import \
+ DataIterator
class MultiThreadDataIterator(DataIterator):
@@ -13,22 +13,27 @@ def __init__(self, data, number_of_threads=6):
def __next__(self):
if self._current_index < self.number_of_steps:
- number_of_threads_in_current_step = min(self.number_of_threads,
- self.number_of_files_in_single_dataset - self.current_file)
+ number_of_threads_in_current_step = min(
+ self.number_of_threads,
+ self.number_of_files_in_single_dataset - self.current_file)
with concurrent.futures.ThreadPoolExecutor() as executor:
- threads = [executor.submit(MultiThreadDataIterator.get_data_from_file,
- self.data, self.current_dataset, self.current_file + i)
- for i in range(number_of_threads_in_current_step)]
+ threads = [executor.submit(
+ MultiThreadDataIterator.get_data_from_file,
+ self.data, self.current_dataset, self.current_file + i)
+ for i in range(number_of_threads_in_current_step)]
data_from_multiple_files = ()
for thread in threads:
data_from_multiple_files += (thread.result(),)
- stacked_data_from_multiple_files = np.hstack(data_from_multiple_files)
- selection = self.get_selection(number_of_threads=number_of_threads_in_current_step,
- current_dataset=self.current_dataset,
- dataset_file_length=self.dataset_file_length,
- current_file=self.current_file,
- number_of_rows=self.number_of_rows)
- data_chunk = DataChunk(data=stacked_data_from_multiple_files, selection=selection)
+ stacked_data_from_multiple_files = np.hstack(
+ data_from_multiple_files)
+ selection = self.get_selection(
+ number_of_threads=number_of_threads_in_current_step,
+ current_dataset=self.current_dataset,
+ dataset_file_length=self.dataset_file_length,
+ current_file=self.current_file,
+ number_of_rows=self.number_of_rows)
+ data_chunk = DataChunk(
+ data=stacked_data_from_multiple_files, selection=selection)
self._current_index += number_of_threads_in_current_step
self.current_file += number_of_threads_in_current_step
@@ -48,3 +53,56 @@ def __next__(self):
def get_data_from_file(data, current_dataset, current_file):
return np.transpose(data.read_data(current_dataset, current_file))
+
+# TODO: finish this code and move it to a new file once data are extracted into a single file.
+class ChunkedDataIterator(DataIterator):
+ def __init__(self, data, number_of_threads=6, read_chunk_mb=100):
+ DataIterator.__init__(self, data)
+ self.number_of_threads = number_of_threads
+ self.read_chunk_mb = read_chunk_mb
+ # Figure out the size of each datafile in each dataset where one dataset is an epoch
+ self.dataset_file_dims = []
+ for dataset in range(self.number_of_datasets):
+ self.dataset_file_dims.append(data.get_data_dims(dataset, 0))
+
+ def __next__(self):
+ if self._current_index < self.number_of_steps:
+ number_of_threads_in_current_step = min(
+ self.number_of_threads,
+ self.number_of_files_in_single_dataset - self.current_file)
+ with concurrent.futures.ThreadPoolExecutor() as executor:
+ threads = [executor.submit(
+ MultiThreadDataIterator.get_data_from_file,
+ self.data, self.current_dataset, self.current_file + i)
+ for i in range(number_of_threads_in_current_step)]
+ data_from_multiple_files = ()
+ for thread in threads:
+ data_from_multiple_files += (thread.result(),)
+ stacked_data_from_multiple_files = np.hstack(
+ data_from_multiple_files)
+ selection = self.get_selection(
+ number_of_threads=number_of_threads_in_current_step,
+ current_dataset=self.current_dataset,
+ dataset_file_length=self.dataset_file_length,
+ current_file=self.current_file,
+ number_of_rows=self.number_of_rows)
+ data_chunk = DataChunk(
+ data=stacked_data_from_multiple_files, selection=selection)
+
+ self._current_index += number_of_threads_in_current_step
+ self.current_file += number_of_threads_in_current_step
+
+ if self.current_file >= self.number_of_files_in_single_dataset:
+ self.current_dataset += 1
+ self.current_file = 0
+
+ del stacked_data_from_multiple_files
+ return data_chunk
+
+ raise StopIteration
+
+ next = __next__
+
+ @staticmethod
+ def get_data_from_file(data, current_dataset, current_file):
+ return np.transpose(data.read_data(current_dataset, current_file))
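
A minimal, self-contained sketch of the thread-pooled read-and-stack pattern used in `__next__` above; `read_data` here is a stub standing in for the data manager (the real reader returns channels x samples, hence the transpose):

```python
import concurrent.futures

import numpy as np

def read_data(dataset_id, file_id):
    # stub: 2 channels x 5 samples, filled with the file id
    return np.full((2, 5), file_id)

def get_data_from_file(data, dataset_id, file_id):
    # samples x channels, matching the iterator's layout
    return np.transpose(read_data(dataset_id, file_id))

with concurrent.futures.ThreadPoolExecutor() as executor:
    threads = [executor.submit(get_data_from_file, None, 0, i)
               for i in range(3)]
    stacked = np.hstack([thread.result() for thread in threads])

print(stacked.shape)  # (5, 6): 5 samples, 2 channels from each of 3 files
```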
diff --git a/rec_to_nwb/processing/nwb/components/iterator/multi_thread_timestamp_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/multi_thread_timestamp_iterator.py
index 24d1bed02..24b518ad3 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/multi_thread_timestamp_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/multi_thread_timestamp_iterator.py
@@ -2,8 +2,8 @@
import numpy as np
from hdmf.data_utils import DataChunk
-
-from rec_to_nwb.processing.nwb.components.iterator.timestamp_iterator import TimestampIterator
+from rec_to_nwb.processing.nwb.components.iterator.timestamp_iterator import \
+ TimestampIterator
class MultiThreadTimestampIterator(TimestampIterator):
@@ -15,19 +15,23 @@ def __init__(self, data, number_of_threads=6):
# Override
def __next__(self):
if self._current_index < self.number_of_steps:
- number_of_threads_in_current_step = min(self.number_of_threads,
- self.number_of_steps - self._current_index)
+ number_of_threads_in_current_step = min(
+ self.number_of_threads,
+ self.number_of_steps - self._current_index)
with concurrent.futures.ThreadPoolExecutor() as executor:
- threads = [executor.submit(MultiThreadTimestampIterator.get_data_from_file,
- self.data, self.current_dataset + i)
- for i in range(number_of_threads_in_current_step)]
+ threads = [executor.submit(
+ MultiThreadTimestampIterator.get_data_from_file,
+ self.data, self.current_dataset + i)
+ for i in range(number_of_threads_in_current_step)]
data_from_multiple_files = ()
for thread in threads:
data_from_multiple_files += (thread.result(),)
- stacked_data_from_multiple_files = np.hstack(data_from_multiple_files)
+ stacked_data_from_multiple_files = np.hstack(
+ data_from_multiple_files)
selection = self.__get_selection(number_of_threads_in_current_step)
- data_chunk = DataChunk(data=stacked_data_from_multiple_files, selection=selection)
+ data_chunk = DataChunk(
+ data=stacked_data_from_multiple_files, selection=selection)
self._current_index += number_of_threads_in_current_step
self.current_dataset += number_of_threads_in_current_step
@@ -40,5 +44,7 @@ def __next__(self):
next = __next__
def __get_selection(self, number_of_threads_in_current_step):
- return np.s_[sum(self.dataset_file_lenght[0:self.current_dataset]):
- sum(self.dataset_file_lenght[0:self.current_dataset + number_of_threads_in_current_step]), ]
\ No newline at end of file
+ return np.s_[
+ sum(self.dataset_file_length[0:self.current_dataset]):
+ sum(self.dataset_file_length[0:self.current_dataset +
+ number_of_threads_in_current_step]), ]
diff --git a/rec_to_nwb/processing/nwb/components/iterator/single_thread_data_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/single_thread_data_iterator.py
index f31b45405..16397e7f4 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/single_thread_data_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/single_thread_data_iterator.py
@@ -1,7 +1,7 @@
import numpy as np
from hdmf.data_utils import DataChunk
-
-from rec_to_nwb.processing.nwb.components.iterator.data_iterator import DataIterator
+from rec_to_nwb.processing.nwb.components.iterator.data_iterator import \
+ DataIterator
class SingleThreadDataIterator(DataIterator):
@@ -33,4 +33,5 @@ def __next__(self):
next = __next__
def __get_data_from_file(self):
- return np.transpose(self.data.read_data(self.current_dataset, self.current_file))
+ return np.transpose(self.data.read_data(self.current_dataset,
+ self.current_file))
diff --git a/rec_to_nwb/processing/nwb/components/iterator/single_thread_timestamp_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/single_thread_timestamp_iterator.py
index be89a5b2d..19d59053d 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/single_thread_timestamp_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/single_thread_timestamp_iterator.py
@@ -1,7 +1,7 @@
import numpy as np
from hdmf.data_utils import DataChunk
-
-from rec_to_nwb.processing.nwb.components.iterator.timestamp_iterator import TimestampIterator
+from rec_to_nwb.processing.nwb.components.iterator.timestamp_iterator import \
+ TimestampIterator
class SingleThreadTimestampIterator(TimestampIterator):
@@ -26,5 +26,6 @@ def __get_timestamps(self):
return self.data.retrieve_real_timestamps(self.current_dataset)
def __get_selection(self):
- return np.s_[sum(self.dataset_file_lenght[0:self.current_dataset]):
- sum(self.dataset_file_lenght[0:self.current_dataset + 1]), ]
\ No newline at end of file
+ return np.s_[
+ sum(self.dataset_file_length[0:self.current_dataset]):
+ sum(self.dataset_file_length[0:self.current_dataset + 1]), ]
diff --git a/rec_to_nwb/processing/nwb/components/iterator/timestamp_iterator.py b/rec_to_nwb/processing/nwb/components/iterator/timestamp_iterator.py
index 1fe2a20ef..59d51b423 100644
--- a/rec_to_nwb/processing/nwb/components/iterator/timestamp_iterator.py
+++ b/rec_to_nwb/processing/nwb/components/iterator/timestamp_iterator.py
@@ -10,9 +10,9 @@ def __init__(self, data):
self._current_index = 0
self.current_dataset = 0
- self.dataset_file_lenght = data.get_file_lenghts_in_datasets()
self.number_of_steps = self.data.get_number_of_datasets()
- self.dataset_file_length = self.data.get_file_lenghts_in_datasets()
+ self.dataset_file_length = self.data.get_file_lengths_in_datasets()
self.shape = self.data.get_final_data_shape()
# Override
diff --git a/rec_to_nwb/processing/nwb/components/mda/electrical_series_creator.py b/rec_to_nwb/processing/nwb/components/mda/electrical_series_creator.py
index ebc3f487e..416da9f9b 100644
--- a/rec_to_nwb/processing/nwb/components/mda/electrical_series_creator.py
+++ b/rec_to_nwb/processing/nwb/components/mda/electrical_series_creator.py
@@ -10,7 +10,7 @@ def create_mda(cls, fl_mda):
data=fl_mda.mda_data.mda_data,
electrodes=fl_mda.electrode_table_region,
timestamps=fl_mda.mda_data.mda_timestamps,
- comments="sample comment",
- description="Electrical series registered on electrode",
- conversion=fl_mda.conversion
+ conversion=fl_mda.conversion,
+ comments="No comment",
+ description="Recording of extracellular voltage"
)
diff --git a/rec_to_nwb/processing/nwb/components/mda/fl_mda.py b/rec_to_nwb/processing/nwb/components/mda/fl_mda.py
index 307e1b5a4..9519b1fc1 100644
--- a/rec_to_nwb/processing/nwb/components/mda/fl_mda.py
+++ b/rec_to_nwb/processing/nwb/components/mda/fl_mda.py
@@ -1,9 +1,10 @@
class FlMda:
- def __init__(self, sampling_rate, electrode_table_region, mda_data, conversion):
+ def __init__(self, sampling_rate, conversion, electrode_table_region, mda_data):
"""internal representation of mda data"""
self.sampling_rate = sampling_rate
+ self.conversion = conversion
self.electrode_table_region = electrode_table_region
self.mda_data = mda_data
- self.conversion = conversion
diff --git a/rec_to_nwb/processing/nwb/components/mda/fl_mda_builder.py b/rec_to_nwb/processing/nwb/components/mda/fl_mda_builder.py
index eb91a8ecd..f59d2002e 100644
--- a/rec_to_nwb/processing/nwb/components/mda/fl_mda_builder.py
+++ b/rec_to_nwb/processing/nwb/components/mda/fl_mda_builder.py
@@ -8,4 +8,4 @@ def __init__(self, sampling_rate, conversion):
self.conversion = conversion
def build(self, electrode_table_region, data):
- return FlMda(self.sampling_rate, electrode_table_region, data, self.conversion)
+ return FlMda(self.sampling_rate, self.conversion, electrode_table_region, data)
diff --git a/rec_to_nwb/processing/nwb/components/mda/fl_mda_extractor.py b/rec_to_nwb/processing/nwb/components/mda/fl_mda_extractor.py
index ea76a2fd9..0613275d9 100644
--- a/rec_to_nwb/processing/nwb/components/mda/fl_mda_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/mda/fl_mda_extractor.py
@@ -1,53 +1,93 @@
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import MultiThreadDataIterator
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import MultiThreadTimestampIterator
+import os
+
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import \
+ MultiThreadDataIterator
+from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import \
+ MultiThreadTimestampIterator
from rec_to_nwb.processing.nwb.components.mda.mda_content import MdaContent
-from rec_to_nwb.processing.nwb.components.mda.mda_data_manager import MdaDataManager
-from rec_to_nwb.processing.nwb.components.mda.mda_timestamp_manager import MdaTimestampDataManager
+from rec_to_nwb.processing.nwb.components.mda.mda_data_manager import \
+ MdaDataManager
+from rec_to_nwb.processing.nwb.components.mda.mda_timestamp_manager import \
+ MdaTimestampDataManager
+
+MICROVOLTS_PER_VOLT = 1e6
class FlMdaExtractor:
- def __init__(self, datasets):
+ def __init__(self, datasets, conversion):
self.datasets = datasets
+ # the conversion factor is to volts, so we multiply by 1e6 to convert to uV
+ self.raw_to_uv = float(conversion) * MICROVOLTS_PER_VOLT
def get_data(self):
- mda_data, timestamps, continuous_time = self.__extract_data()
+ mda_data_files, timestamp_files, continuous_time_files = self.__extract_data_files()
mda_timestamp_data_manager = MdaTimestampDataManager(
- directories=timestamps,
- continuous_time_directories=continuous_time
+ directories=timestamp_files,
+ continuous_time_directories=continuous_time_files
)
- mda_data_manager = MdaDataManager(mda_data)
- data_iterator = MultiThreadDataIterator(mda_data_manager)
- timestamp_iterator = MultiThreadTimestampIterator(mda_timestamp_data_manager)
+ mda_data_manager = MdaDataManager(mda_data_files, self.raw_to_uv)
+
+ # estimate the largest file size and set the number of threads accordingly, assuming 32 GB of available RAM
+ def max_file_size(dim):
+ # Loop through datasets and files to find largest file along given dimension (dim)
+ return max([mda_data_manager.get_data_shape(dataset_num, file_num)[dim]
+ for dataset_num in range(len(mda_data_manager.directories))
+ for file_num in range(len(mda_data_manager.directories[dataset_num]))])
+ # samples x channels x 2 bytes/sample
+ bytes_estimate = max_file_size(0) * max_file_size(1) * 2
+ if bytes_estimate < 3e9: # each file < 3GB
+ num_threads = 6
+ elif bytes_estimate < 6e9:
+ num_threads = 3
+ else:
+ num_threads = 1
+
+ print(f'in FlMdaExtractor: will write {num_threads} files as a chunk')
+ data_iterator = MultiThreadDataIterator(
+ mda_data_manager, number_of_threads=num_threads)
+ timestamp_iterator = MultiThreadTimestampIterator(
+ mda_timestamp_data_manager)
return MdaContent(data_iterator, timestamp_iterator)
- def __extract_data(self):
- mda_data = []
- timestamps = []
- continuous_time = []
+ def __extract_data_files(self):
+ mda_data_files = []
+ timestamp_files = []
+ continuous_time_files = []
for dataset in self.datasets:
- data_from_single_dataset = self.__extract_data_for_single_dataset(dataset)
- mda_data.append(data_from_single_dataset[0])
- timestamps.append(data_from_single_dataset[1])
- continuous_time.append(data_from_single_dataset[2])
+ data_files_from_single_dataset = self.__extract_data_files_for_single_dataset(
+ dataset)
+ mda_data_files.append(data_files_from_single_dataset[0])
+ timestamp_files.append(data_files_from_single_dataset[1])
+ continuous_time_files.append(data_files_from_single_dataset[2])
- return mda_data, timestamps, continuous_time
+ return mda_data_files, timestamp_files, continuous_time_files
- def __extract_data_for_single_dataset(self, dataset):
- data_from_current_dataset = self.__get_data_from_current_dataset(dataset)
+ def __extract_data_files_for_single_dataset(self, dataset):
+ data_from_current_dataset = self.__get_data_files_from_current_dataset(
+ dataset)
if not self.__data_exist(data_from_current_dataset, dataset):
- raise MissingDataException("Incomplete data in dataset " + str(dataset.name) + ", missing mda files")
+ raise MissingDataException(
+ "Incomplete data in dataset " + str(dataset.name) + ", missing mda files")
return data_from_current_dataset, [dataset.get_mda_timestamps()], dataset.get_continuous_time()
@staticmethod
- def __get_data_from_current_dataset(dataset):
- return [dataset.get_data_path_from_dataset('mda') + mda_file for mda_file in
- dataset.get_all_data_from_dataset('mda') if
+ def __get_data_files_from_current_dataset(dataset):
+ data_files = [os.path.join(dataset.get_data_path_from_dataset('mda'), mda_file) for mda_file in
+ dataset.get_all_data_from_dataset('mda') if
+ (mda_file.endswith('.mda') and not mda_file.endswith('timestamps.mda'))]
+ if len(data_files) > 0:
+ return data_files
+ else:
+ return [
+ os.path.join(dataset.get_data_path_from_dataset('mountainsort'), mda_file) for mda_file in
+ dataset.get_all_data_from_dataset('mountainsort') if
(mda_file.endswith('.mda') and not mda_file.endswith('timestamps.mda'))]
@staticmethod
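
A worked example of the thread-count heuristic above, with hypothetical recording parameters:

```python
# Hypothetical epoch: 20 min at 30 kHz, 32 channels, 2 bytes (int16)/sample
bytes_estimate = (20 * 60 * 30000) * 32 * 2
print(bytes_estimate)  # 2_304_000_000 < 3e9, so 6 files are read per chunk
```

A 2-hour, 128-channel file at the same sampling rate comes to roughly 55 GB, which exceeds the 6e9 threshold and would fall back to a single thread.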
diff --git a/rec_to_nwb/processing/nwb/components/mda/fl_mda_manager.py b/rec_to_nwb/processing/nwb/components/mda/fl_mda_manager.py
index 87983ab78..66388a934 100644
--- a/rec_to_nwb/processing/nwb/components/mda/fl_mda_manager.py
+++ b/rec_to_nwb/processing/nwb/components/mda/fl_mda_manager.py
@@ -1,13 +1,20 @@
-from rec_to_nwb.processing.nwb.components.mda.fl_mda_builder import FlMdaBuilder
-from rec_to_nwb.processing.nwb.components.mda.table_region_builder import TableRegionBuilder
-from rec_to_nwb.processing.nwb.components.mda.fl_mda_extractor import FlMdaExtractor
+from rec_to_nwb.processing.nwb.components.mda.fl_mda_builder import \
+ FlMdaBuilder
+from rec_to_nwb.processing.nwb.components.mda.fl_mda_extractor import \
+ FlMdaExtractor
+from rec_to_nwb.processing.nwb.components.mda.table_region_builder import \
+ TableRegionBuilder
+
+VOLTS_IN_MICROVOLTS = 1e-6
class FlMdaManager:
def __init__(self, nwb_content, sampling_rate, datasets, conversion):
self.__table_region_builder = TableRegionBuilder(nwb_content)
- self.__fl_mda_extractor = FlMdaExtractor(datasets)
- self.__fl_mda_builder = FlMdaBuilder(sampling_rate, conversion)
+ self.__fl_mda_extractor = FlMdaExtractor(datasets, conversion)
+ # we converted the data to uV in the extractor, so the conversion to V is always 1e-6
+ self.__fl_mda_builder = FlMdaBuilder(
+ sampling_rate, VOLTS_IN_MICROVOLTS)
def get_data(self):
electrode_table_region = self.__table_region_builder.build()
diff --git a/rec_to_nwb/processing/nwb/components/mda/mda_data_manager.py b/rec_to_nwb/processing/nwb/components/mda/mda_data_manager.py
index 225c8748f..453f0167d 100644
--- a/rec_to_nwb/processing/nwb/components/mda/mda_data_manager.py
+++ b/rec_to_nwb/processing/nwb/components/mda/mda_data_manager.py
@@ -1,13 +1,29 @@
-from mountainlab_pytools.mdaio import readmda
-
+from mountainlab_pytools.mdaio import DiskReadMda, readmda
+from numpy import isclose
from rec_to_nwb.processing.nwb.common.data_manager import DataManager
class MdaDataManager(DataManager):
- def __init__(self, directories):
+ def __init__(self, directories, raw_to_uv):
+ self.raw_to_uv = raw_to_uv
DataManager.__init__(self, directories)
-
# override
+
def read_data(self, dataset_id, file_id):
- data = readmda(self.directories[dataset_id][file_id])
- return data
+ # read the data from the MDA file, convert to uV if needed, and return as int16
+ if not isclose(self.raw_to_uv, 1.0):
+ return (readmda(self.directories[dataset_id][file_id]) * self.raw_to_uv).astype('int16')
+ else:
+ return readmda(self.directories[dataset_id][file_id]).astype('int16')
+
+ # override to make more efficient
+ def _get_data_shape(self, dataset_id, file_num=0):
+ # use DiskReadMda to return a two-element list with the MxN data dimensions of a given file in a given dataset
+ return DiskReadMda(self.directories[dataset_id][file_num]).dims()
+
+ # override to make more efficient; not clear if this is used right now.
+ def _get_number_of_rows_per_file(self):
+ dataset_num = 0 # assume that all datasets have identical structures
+ # all files may not have the same numbers of rows (e.g. channels)
+ return [self.get_data_shape(dataset_num, file_num)[0]
+ for file_num in range(self.number_of_files_per_dataset)]
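
A minimal sketch (conversion factor hypothetical) of the scaling chain this manager implements: raw A/D units are scaled to microvolts and stored as int16, which is why the builder downstream can always use a fixed 1e-6 volts-per-unit conversion:

```python
import numpy as np

raw = np.array([100, -250, 4000], dtype='int16')  # raw A/D units
conversion = 0.195e-6          # hypothetical volts per raw unit
raw_to_uv = conversion * 1e6   # = 0.195 uV per raw unit

data_uv = (raw * raw_to_uv).astype('int16')  # values written to the NWB file
volts = data_uv * 1e-6                       # conversion applied by readers
print(data_uv)   # [ 19 -48 780]
```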
diff --git a/rec_to_nwb/processing/nwb/components/mda/mda_timestamp_manager.py b/rec_to_nwb/processing/nwb/components/mda/mda_timestamp_manager.py
index 1ff766170..ad812403b 100644
--- a/rec_to_nwb/processing/nwb/components/mda/mda_timestamp_manager.py
+++ b/rec_to_nwb/processing/nwb/components/mda/mda_timestamp_manager.py
@@ -2,18 +2,22 @@
import os
from mountainlab_pytools.mdaio import readmda
-
-from rec_to_nwb.processing.nwb.common.timestamps_manager import TimestampManager
+from rec_to_nwb.processing.nwb.common.timestamps_manager import \
+ TimestampManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
class MdaTimestampDataManager(TimestampManager):
def __init__(self, directories, continuous_time_directories):
- TimestampManager.__init__(self, directories, continuous_time_directories)
+ TimestampManager.__init__(
+ self, directories, continuous_time_directories)
def _get_timestamps(self, dataset_id):
return readmda(self.directories[dataset_id][0])
diff --git a/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_extractor.py b/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_extractor.py
deleted file mode 100644
index c0b46f256..000000000
--- a/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_extractor.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import MultiThreadDataIterator
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import MultiThreadTimestampIterator
-from rec_to_nwb.processing.nwb.components.mda.mda_content import MdaContent
-from rec_to_nwb.processing.nwb.components.mda.mda_data_manager import MdaDataManager
-from rec_to_nwb.processing.nwb.components.mda.mda_timestamp_manager import MdaTimestampDataManager
-
-
-class OldFlMdaExtractor:
-
- def __init__(self, datasets):
- self.datasets = datasets
-
- def get_data(self):
- mda_data, timestamps, continuous_time = self.__extract_data()
- mda_timestamp_data_manager = MdaTimestampDataManager(
- directories=timestamps,
- )
- mda_data_manager = MdaDataManager(mda_data)
- data_iterator = MultiThreadDataIterator(mda_data_manager)
- timestamp_iterator = MultiThreadTimestampIterator(mda_timestamp_data_manager)
-
- return MdaContent(data_iterator, timestamp_iterator)
-
- def __extract_data(self):
- mda_data = []
- timestamps = []
- continuous_time = []
-
- for dataset in self.datasets:
- data_from_single_dataset = self.__extract_data_for_single_dataset(dataset)
- mda_data.append(data_from_single_dataset[0])
- timestamps.append(data_from_single_dataset[1])
- continuous_time.append(data_from_single_dataset[2])
-
- return mda_data, timestamps, continuous_time
-
- def __extract_data_for_single_dataset(self, dataset):
- data_from_current_dataset = self.__get_data_from_current_dataset(dataset)
-
- if not self.__data_exist(data_from_current_dataset, dataset):
- raise MissingDataException("Incomplete data in dataset " + str(dataset.name) + ", missing mda files")
-
- return data_from_current_dataset, [dataset.get_mda_timestamps()], dataset.get_continuous_time()
-
- @staticmethod
- def __get_data_from_current_dataset(dataset):
- return [dataset.get_data_path_from_dataset('mda') + mda_file for mda_file in
- dataset.get_all_data_from_dataset('mda') if
- (mda_file.endswith('.mda') and not mda_file.endswith('timestamps.mda'))]
-
- @staticmethod
- def __data_exist(data_from_current_dataset, dataset):
- if (data_from_current_dataset is None
- or dataset.get_mda_timestamps() is None
- or dataset.get_continuous_time() is None):
- return False
- return True
diff --git a/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_manager.py b/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_manager.py
deleted file mode 100644
index 5d8e8aa97..000000000
--- a/rec_to_nwb/processing/nwb/components/mda/old_fl_mda_manager.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from rec_to_nwb.processing.nwb.components.mda.fl_mda_builder import FlMdaBuilder
-from rec_to_nwb.processing.nwb.components.mda.old_fl_mda_extractor import OldFlMdaExtractor
-from rec_to_nwb.processing.nwb.components.mda.table_region_builder import TableRegionBuilder
-
-
-class OldFlMdaManager:
- def __init__(self, nwb_content, sampling_rate, datasets, conversion):
- self.__table_region_builder = TableRegionBuilder(nwb_content)
- self.__fl_mda_extractor = OldFlMdaExtractor(datasets)
- self.__fl_mda_builder = FlMdaBuilder(sampling_rate, conversion)
-
- def get_data(self):
- electrode_table_region = self.__table_region_builder.build()
- data = self.__fl_mda_extractor.get_data()
- return self.__fl_mda_builder.build(electrode_table_region, data)
diff --git a/rec_to_nwb/processing/nwb/components/position/fl_position.py b/rec_to_nwb/processing/nwb/components/position/fl_position.py
index 33c883774..9a4eaf882 100644
--- a/rec_to_nwb/processing/nwb/components/position/fl_position.py
+++ b/rec_to_nwb/processing/nwb/components/position/fl_position.py
@@ -1,4 +1,8 @@
+"""Class that stores the position information"""
+
+
class FlPosition:
+ """Stores the position information"""
def __init__(self, position_data, column_labels, timestamps, conversion):
self.position_data = position_data
diff --git a/rec_to_nwb/processing/nwb/components/position/fl_position_builder.py b/rec_to_nwb/processing/nwb/components/position/fl_position_builder.py
index 9bbd2cf44..4072c6ae8 100644
--- a/rec_to_nwb/processing/nwb/components/position/fl_position_builder.py
+++ b/rec_to_nwb/processing/nwb/components/position/fl_position_builder.py
@@ -1,4 +1,7 @@
-from rec_to_nwb.processing.nwb.components.position.fl_position import FlPosition
+"""Class that instatiates the FlPosition class...."""
+
+from rec_to_nwb.processing.nwb.components.position.fl_position import \
+ FlPosition
class FlPositionBuilder:
diff --git a/rec_to_nwb/processing/nwb/components/position/fl_position_extractor.py b/rec_to_nwb/processing/nwb/components/position/fl_position_extractor.py
index 048996714..beadd722b 100644
--- a/rec_to_nwb/processing/nwb/components/position/fl_position_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/position/fl_position_extractor.py
@@ -1,64 +1,73 @@
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import MultiThreadDataIterator
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import MultiThreadTimestampIterator
-from rec_to_nwb.processing.nwb.components.position.pos_data_manager import PosDataManager
-from rec_to_nwb.processing.nwb.components.position.pos_timestamp_manager import PosTimestampManager
+"""Gets the online position tracking directories and retrieve position and
+timestamps"""
+import os
+
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import \
+ MultiThreadDataIterator
+from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import \
+ MultiThreadTimestampIterator
+from rec_to_nwb.processing.nwb.components.position.pos_data_manager import \
+ PosDataManager
+from rec_to_nwb.processing.nwb.components.position.pos_timestamp_manager import \
+ PosTimestampManager
from rec_to_nwb.processing.tools.beartype.beartype import beartype
class FlPositionExtractor:
@beartype
- def __init__(self, datasets: list):
+ def __init__(self, datasets: list, convert_timestamps: bool = True):
self.datasets = datasets
- self.all_pos, self.continuous_time = self.__extract_data()
+ (self.all_position_directories,
+ self.continuous_time_directories) = self.__extract_data()
+ self.convert_timestamps = convert_timestamps
def __extract_data(self):
- all_pos = []
- continuous_time = []
+ """Gets online position tracking file and corresponding continuous
+ time file"""
+ all_position_directories = []
+ continuous_time_directories = []
for dataset in self.datasets:
- data_from_current_dataset = [
- dataset.get_data_path_from_dataset('pos') + pos_file for pos_file in
- dataset.get_all_data_from_dataset('pos') if
- (pos_file.endswith('.pos_online.dat'))]
+ pos_online_paths = [
+ os.path.join(
+ dataset.get_data_path_from_dataset('pos'), pos_file)
+ for pos_file in dataset.get_all_data_from_dataset('pos')
+ if pos_file.endswith('.pos_online.dat')]
+
if dataset.get_continuous_time() is None:
raise MissingDataException(
'Incomplete data in dataset '
+ str(dataset.name)
- + 'missing continuous time file')
+ + ', missing continuous time file')
- all_pos.append(data_from_current_dataset)
- continuous_time.append(dataset.get_continuous_time())
- return all_pos, continuous_time
+ all_position_directories.append(pos_online_paths)
+ continuous_time_directories.append(dataset.get_continuous_time())
+ return all_position_directories, continuous_time_directories
def get_positions(self):
- pos_datas = [
- PosDataManager(directories=[single_pos])
- for single_pos in self.all_pos
- ]
return [
- MultiThreadDataIterator(pos_data)
- for pos_data in pos_datas
+ MultiThreadDataIterator(
+ PosDataManager(directories=[position_directory]))
+ for position_directory in self.all_position_directories
]
def get_columns_labels(self):
- pos_datas = [
- PosDataManager(directories=[single_pos])
- for single_pos in self.all_pos
- ]
return [
- pos_data.get_column_labels_as_string()
- for pos_data in pos_datas
+ PosDataManager(
+ directories=[position_directory]).get_column_labels_as_string()
+ for position_directory in self.all_position_directories
]
def get_timestamps(self):
- pos_timestamp_managers = [
- PosTimestampManager(
- directories=[single_pos],
- continuous_time_directories=[continuous_time]
- )
- for single_pos, continuous_time in zip(self.all_pos, self.continuous_time)
- ]
return [
- MultiThreadTimestampIterator(pos_timestamp_manager)
- for pos_timestamp_manager in pos_timestamp_managers
+ MultiThreadTimestampIterator(
+ PosTimestampManager(
+ directories=[position_directory],
+ continuous_time_directories=[continuous_time_directory],
+ convert_timestamps=self.convert_timestamps
+ ))
+ for position_directory, continuous_time_directory in zip(
+ self.all_position_directories,
+ self.continuous_time_directories)
]
diff --git a/rec_to_nwb/processing/nwb/components/position/fl_position_manager.py b/rec_to_nwb/processing/nwb/components/position/fl_position_manager.py
index a76d359e8..fb71acb3b 100644
--- a/rec_to_nwb/processing/nwb/components/position/fl_position_manager.py
+++ b/rec_to_nwb/processing/nwb/components/position/fl_position_manager.py
@@ -1,35 +1,43 @@
-import re
-
-from rec_to_nwb.processing.exceptions.invalid_metadata_exception import InvalidMetadataException
-from rec_to_nwb.processing.nwb.components.position.fl_position_builder import FlPositionBuilder
-from rec_to_nwb.processing.nwb.components.position.fl_position_extractor import FlPositionExtractor
+from rec_to_nwb.processing.exceptions.invalid_metadata_exception import \
+ InvalidMetadataException
+from rec_to_nwb.processing.nwb.components.position.fl_position_builder import \
+ FlPositionBuilder
+from rec_to_nwb.processing.nwb.components.position.fl_position_extractor import \
+ FlPositionExtractor
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_equal_length
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_equal_length
class FlPositionManager:
@beartype
- def __init__(self, datasets: list, metadata: dict, dataset_names: list, process_timestamps: bool):
+ def __init__(self, datasets: list, metadata: dict, dataset_names: list,
+ process_timestamps: bool,
+ convert_timestamps: bool = True):
self.datasets = datasets
self.metadata = metadata
self.dataset_names = dataset_names
self.process_timestamps = process_timestamps
- self.fl_position_extractor = FlPositionExtractor(datasets)
+ self.fl_position_extractor = FlPositionExtractor(
+ datasets,
+ convert_timestamps=convert_timestamps)
self.fl_position_builder = FlPositionBuilder()
@beartype
def get_fl_positions(self) -> list:
cameras_ids = self.__get_cameras_ids(self.dataset_names, self.metadata)
- meters_per_pixels = self.__get_meters_per_pixels(cameras_ids, self.metadata)
+ meters_per_pixels = self.__get_meters_per_pixels(
+ cameras_ids, self.metadata)
position_datas = self.fl_position_extractor.get_positions()
columns_labels = self.fl_position_extractor.get_columns_labels()
if self.process_timestamps:
timestamps = self.fl_position_extractor.get_timestamps()
- validate_parameters_equal_length(__name__, position_datas, columns_labels, timestamps)
+ validate_parameters_equal_length(
+ __name__, position_datas, columns_labels, timestamps)
return [
self.fl_position_builder.build(
@@ -39,10 +47,12 @@ def get_fl_positions(self) -> list:
conversion=float(meters_per_pixel)
)
for position_data, column_labels, timestamp, meters_per_pixel in
- zip(position_datas, columns_labels, timestamps, meters_per_pixels)
+ zip(position_datas, columns_labels,
+ timestamps, meters_per_pixels)
]
else:
- validate_parameters_equal_length(__name__, position_datas, columns_labels)
+ validate_parameters_equal_length(
+ __name__, position_datas, columns_labels)
return [
self.fl_position_builder.build(
@@ -55,27 +65,24 @@ def get_fl_positions(self) -> list:
zip(position_datas, columns_labels, meters_per_pixels)
]
-
-
-
-
@staticmethod
def __get_cameras_ids(dataset_names, metadata):
camera_ids = []
for dataset_name in dataset_names:
- dataset_name = re.sub(r'_\w\d\d?', '', dataset_name)
- dataset_name = re.sub(r'^[0]', '', dataset_name)
-
+ # extract the first element of the dataset_name as the epoch number
+ dataset_elements = str(dataset_name).split('_')
+ epoch_num = str(int(dataset_elements[0]))
try:
camera_ids.append(
next(
task['camera_id']
for task in metadata['tasks']
- if dataset_name in task['task_epochs']
+ if epoch_num in task['task_epochs']
)[0]
)
except:
- raise InvalidMetadataException('Invalid camera metadata for datasets')
+ raise InvalidMetadataException(
+ 'Invalid camera metadata for datasets')
return camera_ids
@staticmethod
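
To illustrate the new epoch parsing above with a hypothetical dataset name (epochs stored as strings in the metadata):

```python
dataset_name = '02_r1'
dataset_elements = str(dataset_name).split('_')
epoch_num = str(int(dataset_elements[0]))  # '02' -> '2'

tasks = [{'camera_id': ['0'], 'task_epochs': ['1', '3']},
         {'camera_id': ['1'], 'task_epochs': ['2', '4']}]
camera_id = next(task['camera_id'] for task in tasks
                 if epoch_num in task['task_epochs'])[0]
print(camera_id)  # '1'
```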
diff --git a/rec_to_nwb/processing/nwb/components/position/old_fl_position_extractor.py b/rec_to_nwb/processing/nwb/components/position/old_fl_position_extractor.py
deleted file mode 100644
index 6ebe82c79..000000000
--- a/rec_to_nwb/processing/nwb/components/position/old_fl_position_extractor.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_data_iterator import MultiThreadDataIterator
-from rec_to_nwb.processing.nwb.components.iterator.multi_thread_timestamp_iterator import MultiThreadTimestampIterator
-from rec_to_nwb.processing.nwb.components.position.old_pos_timestamp_manager import OldPosTimestampManager
-from rec_to_nwb.processing.nwb.components.position.pos_data_manager import PosDataManager
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-
-
-class OldFlPositionExtractor:
-
- @beartype
- def __init__(self, datasets: list):
- self.datasets = datasets
- self.all_pos = self.__extract_data()
-
- def __extract_data(self):
- all_pos = []
- for dataset in self.datasets:
- data_from_current_dataset = [
- dataset.get_data_path_from_dataset('pos') + pos_file for pos_file in
- dataset.get_all_data_from_dataset('pos') if
- (pos_file.endswith('.pos_online.dat'))]
- all_pos.append(data_from_current_dataset)
- return all_pos
-
- def get_positions(self):
- pos_datas = [
- PosDataManager(directories=[single_pos])
- for single_pos in self.all_pos
- ]
- return [
- MultiThreadDataIterator(pos_data)
- for pos_data in pos_datas
- ]
-
- def get_columns_labels(self):
- pos_datas = [
- PosDataManager(directories=[single_pos])
- for single_pos in self.all_pos
- ]
- return [
- pos_data.get_column_labels_as_string()
- for pos_data in pos_datas
- ]
-
- def get_timestamps(self):
- old_pos_timestamp_managers = [
- OldPosTimestampManager(
- directories=[single_pos],
- )
- for single_pos in self.all_pos
- ]
- return [
- MultiThreadTimestampIterator(old_pos_timestamp_manager)
- for old_pos_timestamp_manager in old_pos_timestamp_managers
- ]
diff --git a/rec_to_nwb/processing/nwb/components/position/old_fl_position_manager.py b/rec_to_nwb/processing/nwb/components/position/old_fl_position_manager.py
deleted file mode 100644
index dd84059c4..000000000
--- a/rec_to_nwb/processing/nwb/components/position/old_fl_position_manager.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import re
-
-from rec_to_nwb.processing.exceptions.invalid_metadata_exception import InvalidMetadataException
-from rec_to_nwb.processing.nwb.components.position.fl_position_builder import FlPositionBuilder
-from rec_to_nwb.processing.nwb.components.position.old_fl_position_extractor import OldFlPositionExtractor
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_equal_length
-
-
-class OldFlPositionManager:
-
- @beartype
- def __init__(self, datasets: list, metadata: dict, dataset_names: list, process_timestamps: bool):
- self.datasets = datasets
- self.metadata = metadata
- self.dataset_names = dataset_names
- self.process_timestamps = process_timestamps
-
- self.old_fl_position_extractor = OldFlPositionExtractor(datasets)
- self.fl_position_builder = FlPositionBuilder()
-
- @beartype
- def get_fl_positions(self) -> list:
- cameras_ids = self.__get_cameras_ids(self.dataset_names, self.metadata)
- meters_per_pixels = self.__get_meters_per_pixels(cameras_ids, self.metadata)
-
- position_datas = self.old_fl_position_extractor.get_positions()
- columns_labels = self.old_fl_position_extractor.get_columns_labels()
- if self.process_timestamps:
- timestamps = self.old_fl_position_extractor.get_timestamps()
-
- validate_parameters_equal_length(__name__, position_datas, columns_labels, timestamps)
-
- return [
- self.fl_position_builder.build(
- position_data=position_data,
- column_labels=column_labels,
- timestamps=timestamp,
- conversion=float(meters_per_pixel)
- )
- for position_data, column_labels, timestamp, meters_per_pixel in
- zip(position_datas, columns_labels, timestamps, meters_per_pixels)
- ]
- else:
- validate_parameters_equal_length(__name__, position_datas, columns_labels)
-
- return [
- self.fl_position_builder.build(
- position_data=position_data,
- column_labels=column_labels,
- timestamps=[],
- conversion=float(meters_per_pixel)
- )
- for position_data, column_labels, meters_per_pixel in
- zip(position_datas, columns_labels, meters_per_pixels)
- ]
-
- @staticmethod
- def __get_cameras_ids(dataset_names, metadata):
- camera_ids = []
- for dataset_name in dataset_names:
- dataset_name = re.sub(r'_\w\d\d?', '', dataset_name)
- dataset_name = re.sub(r'^[0]', '', dataset_name)
-
- try:
- camera_ids.append(
- next(
- task['camera_id']
- for task in metadata['tasks']
- if dataset_name in task['task_epochs']
- )[0]
- )
- except:
- raise InvalidMetadataException('Invalid camera metadata for datasets')
- return camera_ids
-
- @staticmethod
- def __get_meters_per_pixels(cameras_ids, metadata):
- meters_per_pixels = []
- for camera_id in cameras_ids:
- try:
- meters_per_pixels.append(
- next(
- float(camera['meters_per_pixel'])
- for camera in metadata['cameras']
- if camera_id == camera['id']
- )
- )
- except:
- raise InvalidMetadataException('Invalid camera metadata')
- return meters_per_pixels
diff --git a/rec_to_nwb/processing/nwb/components/position/old_pos_timestamp_manager.py b/rec_to_nwb/processing/nwb/components/position/old_pos_timestamp_manager.py
deleted file mode 100644
index 8ece48536..000000000
--- a/rec_to_nwb/processing/nwb/components/position/old_pos_timestamp_manager.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import logging.config
-import os
-
-import pandas as pd
-from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.nwb.common.old_timestamps_manager import OldTimestampManager
-
-path = os.path.dirname(os.path.abspath(__file__))
-
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
-logger = logging.getLogger(__name__)
-
-
-class OldPosTimestampManager(OldTimestampManager):
- def __init__(self, directories):
- OldTimestampManager.__init__(self, directories)
-
- # override
- def _get_timestamps(self, dataset_id):
- pos_online = readTrodesExtractedDataFile(self.directories[dataset_id][0])
- position = pd.DataFrame(pos_online['data'])
- return position.time.to_numpy(dtype='int64')
diff --git a/rec_to_nwb/processing/nwb/components/position/pos_data_manager.py b/rec_to_nwb/processing/nwb/components/position/pos_data_manager.py
index 9e30bbfaf..c09ce9d56 100644
--- a/rec_to_nwb/processing/nwb/components/position/pos_data_manager.py
+++ b/rec_to_nwb/processing/nwb/components/position/pos_data_manager.py
@@ -1,6 +1,5 @@
import pandas as pd
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
from rec_to_nwb.processing.nwb.common.data_manager import DataManager
@@ -11,7 +10,8 @@ def __init__(self, directories):
# override
def read_data(self, dataset_id, file_id):
"""extract data from POS files and build FlPos"""
- pos_online = readTrodesExtractedDataFile(self.directories[dataset_id][file_id])
+ pos_online = readTrodesExtractedDataFile(
+ self.directories[dataset_id][file_id])
position = pd.DataFrame(pos_online['data'])
labels = self.get_column_labels()
filtered_position = [position[label] for label in labels]
@@ -26,7 +26,8 @@ def get_column_labels(self):
return column_labels_list
def get_column_labels_as_string(self):
- """extract column labels from POS files and converts them do single string"""
+ """extract column labels from POS files and converts them do single
+ string"""
labels = self.get_column_labels()
labels_string = ''
labels_string = ', '.join(labels)
diff --git a/rec_to_nwb/processing/nwb/components/position/pos_timestamp_manager.py b/rec_to_nwb/processing/nwb/components/position/pos_timestamp_manager.py
index 03bbe5234..e58f8a203 100644
--- a/rec_to_nwb/processing/nwb/components/position/pos_timestamp_manager.py
+++ b/rec_to_nwb/processing/nwb/components/position/pos_timestamp_manager.py
@@ -1,23 +1,82 @@
import logging.config
import os
+import numpy as np
import pandas as pd
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.nwb.common.timestamps_manager import TimestampManager
+from rec_to_nwb.processing.nwb.common.timestamps_manager import \
+ TimestampManager
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
+NANOSECONDS_PER_SECOND = 1E9
+
+
class PosTimestampManager(TimestampManager):
- def __init__(self, directories, continuous_time_directories):
- TimestampManager.__init__(self, directories, continuous_time_directories)
+ def __init__(self, directories, continuous_time_directories,
+ convert_timestamps=True):
+ TimestampManager.__init__(
+ self, directories, continuous_time_directories)
+ self.convert_timestamps = convert_timestamps
# override
def _get_timestamps(self, dataset_id):
- pos_online = readTrodesExtractedDataFile(self.directories[dataset_id][0])
+ """Gets timestamps from the online position tracking"""
+ pos_online = readTrodesExtractedDataFile(
+ self.directories[dataset_id][0])
position = pd.DataFrame(pos_online['data'])
- return position.time.to_numpy(dtype='int64')
+ return position.time.unique().astype('int64')
+
+ def retrieve_real_timestamps(self, dataset_id):
+ """Gets the corresponding Trodes timestamps from the online position
+ tracking and matches them to the PTP time in the video file.
+
+ Otherwise, we get the corresponding timestamps from
+ continuous time which corresponds to the neural recording time stamps.
+
+ If there is no corresponding timestamp, the result will be NaN.
+
+ Parameters
+ ----------
+ dataset_id : int
+ Index of the epoch
+
+ Returns
+ -------
+ timestamps : ndarray, shape (n_online_tracked_positions,)
+
+ """
+ try:
+ # Get online position tracking data
+ pos_online_path = self.directories[dataset_id][0]
+ pos_online = readTrodesExtractedDataFile(pos_online_path)
+ pos_online = pd.DataFrame(pos_online['data'])
+ # Make sure to get only the unique timestamps because they can
+ # sometimes repeat after a jump in timestamps
+ online_timestamps_ind = pos_online.time.unique().astype(np.uint64)
+
+ # Get video PTP timestamps
+ camera_hwsync = readTrodesExtractedDataFile(
+ pos_online_path.replace(
+ '.pos_online.dat', '.pos_cameraHWFrameCount.dat'))
+ camera_hwsync = (pd.DataFrame(camera_hwsync['data'])
+ .set_index('PosTimestamp'))
+
+ # Find the PTP timestamps that correspond to position tracking
+ # Convert from nanoseconds to seconds
+ return (camera_hwsync.loc[online_timestamps_ind, 'HWTimestamp']
+ / NANOSECONDS_PER_SECOND).to_numpy()
+ except KeyError:
+ # If PTP timestamps do not exist find the corresponding timestamps
+ # from the neural recording
+ logger.info('No PTP timestamps found. Using neural timestamps.')
+ return TimestampManager.retrieve_real_timestamps(
+ self, dataset_id,
+ convert_timestamps=self.convert_timestamps)
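
A minimal sketch (synthetic frames; field names as in the code above) of the PTP lookup: the unique Trodes timestamps from the position file index the camera hardware-sync table, and the nanosecond PTP stamps are converted to seconds:

```python
import numpy as np
import pandas as pd

NANOSECONDS_PER_SECOND = 1e9

# Synthetic pos_online times; note the repeated timestamp after a jump
pos_online = pd.DataFrame({'time': [10, 20, 20, 30]})
camera_hwsync = pd.DataFrame({
    'PosTimestamp': [10, 20, 30],
    'HWTimestamp': [1.0e9, 1.5e9, 2.0e9],   # PTP time in nanoseconds
}).set_index('PosTimestamp')

online_timestamps_ind = pos_online.time.unique().astype(np.uint64)
ptp_seconds = (camera_hwsync.loc[online_timestamps_ind, 'HWTimestamp']
               / NANOSECONDS_PER_SECOND).to_numpy()
print(ptp_seconds)  # [1.  1.5 2. ]
```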
diff --git a/rec_to_nwb/processing/nwb/components/position/position_creator.py b/rec_to_nwb/processing/nwb/components/position/position_creator.py
index c0181b855..b90ac063b 100644
--- a/rec_to_nwb/processing/nwb/components/position/position_creator.py
+++ b/rec_to_nwb/processing/nwb/components/position/position_creator.py
@@ -1,8 +1,9 @@
from pynwb.behavior import Position
-
-from rec_to_nwb.processing.nwb.components.position.fl_position import FlPosition
+from rec_to_nwb.processing.nwb.components.position.fl_position import \
+ FlPosition
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class PositionCreator:
@@ -17,7 +18,9 @@ def create_all(self, fl_positions: list) -> Position:
@staticmethod
@beartype
def create(position: Position, series_id: int, fl_position: FlPosition):
- validate_parameters_not_none(__name__, fl_position.column_labels, fl_position.position_data,
+ validate_parameters_not_none(__name__,
+ fl_position.column_labels,
+ fl_position.position_data,
fl_position.conversion)
position.create_spatial_series(
name='series_' + str(series_id),
diff --git a/rec_to_nwb/processing/nwb/components/position/time/valid/fl_pos_valid_time_manager.py b/rec_to_nwb/processing/nwb/components/position/time/valid/fl_pos_valid_time_manager.py
index 6a22f45a1..aad9dd1f3 100644
--- a/rec_to_nwb/processing/nwb/components/position/time/valid/fl_pos_valid_time_manager.py
+++ b/rec_to_nwb/processing/nwb/components/position/time/valid/fl_pos_valid_time_manager.py
@@ -1,10 +1,12 @@
import numpy as np
from pynwb import NWBFile
-
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.nwb.components.position.time.valid.fl_pos_valid_time_builder import FlPosValidTimeBuilder
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.nwb.components.position.time.valid.fl_pos_valid_time_builder import \
+ FlPosValidTimeBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.get_times_period_multiplier import get_times_period_multiplier
+from rec_to_nwb.processing.tools.get_times_period_multiplier import \
+ get_times_period_multiplier
class FlPosValidTimeManager:
@@ -37,7 +39,8 @@ def get_fl_pos_valid_times(self, nwb_content: NWBFile, gaps_margin: float = 0.00
timestamps = self.__get_pos_timestamps(nwb_content)
pos_period = self.__calculate_pos_period(timestamps)
- valid_times = self.__get_pos_valid_times(timestamps, pos_period, gaps_margin)
+ valid_times = self.__get_pos_valid_times(
+ timestamps, pos_period, gaps_margin)
return self.__build_pos_valid_times(valid_times)
@staticmethod
@@ -45,7 +48,8 @@ def __get_pos_timestamps(nwb_content):
timestamps = [
np.array(spatial_series.timestamps)
for spatial_series in
- nwb_content.processing['behavior'].data_interfaces['position'].spatial_series.values()
+ nwb_content.processing['behavior'].data_interfaces['position'].spatial_series.values(
+ )
]
timestamp = np.hstack(timestamps)
@@ -67,29 +71,29 @@ def __calculate_pos_period(timestamps):
first_timestamp = timestamps[number_of_invalid_records_at_start_of_a_file]
while not last_timestamp >= 0:
number_of_invalid_records_at_end_of_a_file += 1
- last_timestamp = timestamps[(-1 - number_of_invalid_records_at_end_of_a_file)]
+ last_timestamp = timestamps[(-1 -
+ number_of_invalid_records_at_end_of_a_file)]
return (last_timestamp - first_timestamp) / \
(len_of_timestamps - number_of_invalid_records_at_end_of_a_file -
number_of_invalid_records_at_start_of_a_file)
def __get_pos_valid_times(self, timestamps, period, gaps_margin):
- min_valid_len = 3*gaps_margin
+ min_valid_len = 3 * gaps_margin
timestamps = timestamps[~np.isnan(timestamps)]
gaps = np.diff(timestamps) > period * self.period_multiplier
gap_indexes = np.asarray(np.where(gaps))
gap_start = np.insert(gap_indexes + 1, 0, 0)
- gap_end = np.append(gap_indexes, np.asarray(len(timestamps)-1))
+ gap_end = np.append(gap_indexes, np.asarray(len(timestamps) - 1))
valid_indices = np.vstack([gap_start, gap_end]).transpose()
valid_times = timestamps[valid_indices]
valid_times[:, 0] = valid_times[:, 0] + gaps_margin
valid_times[:, 1] = valid_times[:, 1] - gaps_margin
- valid_intervals = [valid_time > min_valid_len for valid_time in valid_times[:, 1] - valid_times[:, 0]]
+ valid_intervals = [
+ valid_time > min_valid_len for valid_time in valid_times[:, 1] - valid_times[:, 0]]
return valid_times[valid_intervals, :]
@staticmethod
def __build_pos_valid_times(valid_times):
return [FlPosValidTimeBuilder.build(gap[0], gap[1]) for gap in valid_times]
-
-
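
A minimal sketch (synthetic timestamps) of the gap logic in `__get_pos_valid_times`: inter-sample differences above `period * period_multiplier` split the timestamps into valid intervals, whose edges are then shrunk by the margin:

```python
import numpy as np

timestamps = np.array([0.0, 0.1, 0.2, 5.0, 5.1, 5.2])
period, period_multiplier, gaps_margin = 0.1, 1.5, 0.0001

gaps = np.diff(timestamps) > period * period_multiplier
gap_indexes = np.asarray(np.where(gaps))
gap_start = np.insert(gap_indexes + 1, 0, 0)
gap_end = np.append(gap_indexes, len(timestamps) - 1)
valid_times = timestamps[np.vstack([gap_start, gap_end]).T]
valid_times[:, 0] += gaps_margin
valid_times[:, 1] -= gaps_margin
print(valid_times)  # two intervals: ~[0, 0.2] and ~[5.0, 5.2], minus margins
```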
diff --git a/rec_to_nwb/processing/nwb/components/processing_module/processing_module_creator.py b/rec_to_nwb/processing/nwb/components/processing_module/processing_module_creator.py
index eece782b7..be27deec6 100644
--- a/rec_to_nwb/processing/nwb/components/processing_module/processing_module_creator.py
+++ b/rec_to_nwb/processing/nwb/components/processing_module/processing_module_creator.py
@@ -5,7 +5,10 @@
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -19,4 +22,5 @@ def insert(self, data):
self.processing_module.add(data)
except TypeError as err:
# log error instead
- logger.error('Inserting data into processing module has failed: ' + str(err))
+ logger.error(
+ 'Inserting data into processing module has failed: ' + str(err))
diff --git a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_extractor.py b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_extractor.py
index 3db86ff41..50e0d6d77 100644
--- a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_extractor.py
@@ -1,5 +1,4 @@
import numpy as np
-
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
@@ -20,8 +19,12 @@ def __merge_data_from_multiple_files(self, data):
def __get_continuous_time_data_from_single_file(self, continuous_time_file):
continuous_time = readTrodesExtractedDataFile(continuous_time_file)
- new_array = np.ndarray(shape=(len(continuous_time['data']), 2), dtype='int64')
- for i, single_timestamp in enumerate(continuous_time['data']):
- new_array[i, 0] = single_timestamp[0]
- new_array[i, 1] = single_timestamp[3]
+ new_array = np.ndarray(
+ shape=(len(continuous_time['data']), 2), dtype='int64')
+ new_array[:, 0] = continuous_time['data']['trodestime']
+ new_array[:, 1] = continuous_time['data']['adjusted_systime']
return new_array
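
A minimal sketch (synthetic structured array) of the vectorized field access that replaces the old per-row loop; `readTrodesExtractedDataFile` yields a record array with named fields, of which `trodestime` and `adjusted_systime` are used here (the `other1`/`other2` fields below are placeholders for the remaining columns):

```python
import numpy as np

data = np.array([(1, 0, 0, 100), (2, 0, 0, 200)],
                dtype=[('trodestime', '<u4'), ('other1', '<i8'),
                       ('other2', '<i8'), ('adjusted_systime', '<i8')])

new_array = np.ndarray(shape=(len(data), 2), dtype='int64')
new_array[:, 0] = data['trodestime']
new_array[:, 1] = data['adjusted_systime']
print(new_array)  # [[  1 100]
                  #  [  2 200]]
```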
diff --git a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_injector.py b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_injector.py
index c970fee83..00d6622b0 100644
--- a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_injector.py
+++ b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_injector.py
@@ -4,4 +4,3 @@ class SampleCountTimestampCorespondenceInjector:
def inject(timeseries, processing_module_name, nwb_content):
"""insert timeseries series to nwb file"""
nwb_content.processing[processing_module_name].add(timeseries)
-
diff --git a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_manager.py b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_manager.py
index 348d7e45d..ef906e1be 100644
--- a/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_manager.py
+++ b/rec_to_nwb/processing/nwb/components/sample_count_timestamp_corespondence/sample_count_timestamp_corespondence_manager.py
@@ -1,13 +1,15 @@
-from rec_to_nwb.processing.nwb.components.sample_count_timestamp_corespondence.sample_count_timestamp_corespondence_extractor import \
- SampleCountTimestampCorespondenceExtractor
from rec_to_nwb.processing.nwb.components.sample_count_timestamp_corespondence.sample_count_timestamp_corespondence_builder import \
SampleCountTimestampCorespondenceBuilder
+from rec_to_nwb.processing.nwb.components.sample_count_timestamp_corespondence.sample_count_timestamp_corespondence_extractor import \
+ SampleCountTimestampCorespondenceExtractor
class SampleCountTimestampCorespondenceManager:
def __init__(self, datasets):
- self.continuous_time_files = [dataset.get_continuous_time() for dataset in datasets]
- self.extractor = SampleCountTimestampCorespondenceExtractor(self.continuous_time_files)
+ self.continuous_time_files = [
+ dataset.get_continuous_time() for dataset in datasets]
+ self.extractor = SampleCountTimestampCorespondenceExtractor(
+ self.continuous_time_files)
def get_timeseries(self):
data = self.extractor.extract()
diff --git a/rec_to_nwb/processing/nwb/components/task/task_builder.py b/rec_to_nwb/processing/nwb/components/task/task_builder.py
index 2b476793f..2bfaf1be0 100644
--- a/rec_to_nwb/processing/nwb/components/task/task_builder.py
+++ b/rec_to_nwb/processing/nwb/components/task/task_builder.py
@@ -5,9 +5,9 @@
class TaskBuilder:
@staticmethod
- def build(name, description, task_name, task_description, camera_id, task_epochs):
+ def build(name, description, task_name, task_description, camera_id, task_epochs, task_environment):
return FlTask(
name=str(name),
description=description,
- columns=[task_name, task_description, camera_id, task_epochs]
+ columns=[task_name, task_description, camera_id, task_epochs, task_environment]
)
diff --git a/rec_to_nwb/processing/nwb/components/task/task_creator.py b/rec_to_nwb/processing/nwb/components/task/task_creator.py
index 137cc7a2f..2b4600cc6 100644
--- a/rec_to_nwb/processing/nwb/components/task/task_creator.py
+++ b/rec_to_nwb/processing/nwb/components/task/task_creator.py
@@ -1,5 +1,4 @@
from hdmf.common.table import DynamicTable
-
from rec_to_nwb.processing.nwb.components.task.fl_task import FlTask
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/task/task_manager.py b/rec_to_nwb/processing/nwb/components/task/task_manager.py
index 1366c9add..583193c50 100644
--- a/rec_to_nwb/processing/nwb/components/task/task_manager.py
+++ b/rec_to_nwb/processing/nwb/components/task/task_manager.py
@@ -1,8 +1,8 @@
from hdmf.common.table import VectorData
-
from rec_to_nwb.processing.nwb.components.task.task_builder import TaskBuilder
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.validate_parameters import validate_parameters_not_none
+from rec_to_nwb.processing.tools.validate_parameters import \
+ validate_parameters_not_none
class TaskManager:
@@ -16,17 +16,24 @@ def __init__(self, metadata: dict):
def get_fl_tasks(self):
validate_parameters_not_none(__name__, self.metadata['tasks'])
- return [
- self.__get_single_fl_task(
+ task_list = []
+ for task in self.metadata["tasks"]:
+ # Default task_environment to "none" if not in metadata
+ task_environment = task.get("task_environment", "none")
+ task_list.append(self.__get_single_fl_task(
task_name=task['task_name'],
task_description=task['task_description'],
camera_id=[int(camera_id) for camera_id in task['camera_id']],
- task_epochs=[int(epoch) for epoch in task['task_epochs']]
- )
- for task in self.metadata['tasks']
- ]
+ task_epochs=[int(epoch) for epoch in task['task_epochs']],
+ task_environment=task_environment
+ ))
+ return task_list
- def __get_single_fl_task(self, task_name, task_description, camera_id, task_epochs):
+ def __get_single_fl_task(self, task_name, task_description, camera_id, task_epochs, task_environment):
task_name_data = VectorData(
name='task_name',
description='the name of the task',
@@ -47,6 +54,11 @@ def __get_single_fl_task(self, task_name, task_description, camera_id, task_epoc
description='the temporal epochs where the animal was exposed to this task',
data=[task_epochs]
)
+ environment_data = VectorData(
+ name='task_environment',
+ description='the environment in which the animal performed the task',
+ data=[task_environment]
+ )
task = TaskBuilder.build(
name='task_' + str(self.task_counter),
@@ -54,7 +66,8 @@ def __get_single_fl_task(self, task_name, task_description, camera_id, task_epoc
task_name=task_name_data,
task_description=task_description_data,
camera_id=camera_id_data,
- task_epochs=task_epochs_data
+ task_epochs=task_epochs_data,
+ task_environment=environment_data
)
self.task_counter += 1
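
For reference, a minimal sketch of how the optional `task_environment` key is handled above. The task entry here is invented; `"none"` is the default the code assigns when the key is absent from metadata.yml:

```python
# Hypothetical task entry from metadata.yml; 'task_environment' is optional.
task = {
    'task_name': 'w_track',
    'task_description': 'alternation task',
    'camera_id': ['0'],
    'task_epochs': ['2', '4'],
}

# Same default as the loop above, written with dict.get
task_environment = task.get('task_environment', 'none')
camera_id = [int(cid) for cid in task['camera_id']]
task_epochs = [int(epoch) for epoch in task['task_epochs']]
print(task_environment, camera_id, task_epochs)  # none [0] [2, 4]
```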
diff --git a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_builder.py b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_builder.py
index f01a61044..bb817aeb0 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_builder.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_builder.py
@@ -1,3 +1,5 @@
+"""Creates a NWB time series object that corresponds to the video frame counts
+"""
from pynwb import TimeSeries
@@ -13,5 +15,6 @@ def __build_timeseries(cls, data):
return TimeSeries(name="camera_frame_counts",
description="hardware frame count",
data=data[:, 0],
- timestamps=data[:, 1]
+ timestamps=data[:, 1],
+ unit='samples'
)
diff --git a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_extractor.py b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_extractor.py
index 9234c2527..706ceda8a 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_extractor.py
@@ -1,7 +1,9 @@
+"""Returns the video frame counts and timestamps for all epochs."""
+import glob
import os
import numpy as np
-
+import pandas as pd
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
@@ -10,31 +12,30 @@ def __init__(self, raw_data_path):
self.raw_data_path = raw_data_path
def extract(self):
- data = []
- for file in self.__get_all_hwsync_files():
- data.append(self.__extract_single(file))
- merged_data = self.__merge_data_from_multiple_files(data)
- return merged_data
-
- def __get_all_hwsync_files(self):
- all_files = os.listdir(self.raw_data_path)
- hwsync_files = []
- for file in all_files:
- if 'videoTimeStamps.cameraHWSync' in file:
- hwsync_files.append(file)
- return hwsync_files
-
- @staticmethod
- def __merge_data_from_multiple_files(data):
- merged_data = np.vstack(data)
- return merged_data
-
- def __extract_single(self, hw_frame_count_filename):
- content = readTrodesExtractedDataFile(
- self.raw_data_path + "/" + hw_frame_count_filename
- )["data"]
- camera_sample_frame_counts = np.ndarray(shape = (len(content), 2), dtype='uint32')
- for i, record in enumerate(content):
- camera_sample_frame_counts[i, 0] = record[1]
- camera_sample_frame_counts[i, 1] = record[0]
- return camera_sample_frame_counts
+ """Returns the video frame counts and timestamps for all epochs.
+
+ If precision time protocol (PTP) timestamps do not exist, the
+ timestamps are simply a count of the frames in that epoch.
+ """
+ files = glob.glob(
+ os.path.join(self.raw_data_path, '*.videoTimeStamps.cameraHWSync'))
+ if len(files) == 0:
+ # in case of old dataset
+ files = glob.glob(
+ os.path.join(self.raw_data_path,
+ '*.videoTimeStamps.cameraHWFrameCount'))
+ return np.vstack([self.__extract_single(file) for file in files])
+
+ def __extract_single(self, filename):
+ """Returns the video frame counts and timestamps for a single epoch."""
+ content = pd.DataFrame(
+ readTrodesExtractedDataFile(
+ os.path.join(self.raw_data_path, filename)
+ )["data"])
+ try:
+ # columns: frame count, timestamps
+ return content.iloc[:, [1, 0]].to_numpy()
+ except IndexError:
+ return np.vstack((content.iloc[:, 0].to_numpy(), # frame counts
+ np.arange(len(content))) # dummy timestamps
+ ).T.astype(np.uint32)
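
A minimal sketch of the fallback above: cameraHWFrameCount files carry only a single column of frame counts, so selecting two columns raises IndexError and dummy timestamps are substituted. The DataFrame here is an invented stand-in for `readTrodesExtractedDataFile(...)["data"]`:

```python
import numpy as np
import pandas as pd

# Stand-in for a cameraHWFrameCount file: one column, frame counts only.
content = pd.DataFrame({'frameCount': [10, 11, 12]})

try:
    # columns: frame count, timestamps (works for cameraHWSync files)
    counts_and_times = content.iloc[:, [1, 0]].to_numpy()
except IndexError:
    # single-column file: pair frame counts with dummy timestamps
    counts_and_times = np.vstack(
        (content.iloc[:, 0].to_numpy(),   # frame counts
         np.arange(len(content)))         # dummy timestamps
    ).T.astype(np.uint32)

print(counts_and_times)
# [[10  0]
#  [11  1]
#  [12  2]]
```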
diff --git a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_injector.py b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_injector.py
index 0e529fc2d..3b2901e4a 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_injector.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_injector.py
@@ -2,5 +2,5 @@ class CameraSampleFrameCountsInjector:
@staticmethod
def inject(timeseries, processing_module_name, nwb_content):
- """insert timeseries series to nwb file"""
- nwb_content.processing[processing_module_name].add(timeseries)
\ No newline at end of file
+ """Insert timeseries series to nwb file"""
+ nwb_content.processing[processing_module_name].add(timeseries)
diff --git a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_manager.py b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_manager.py
index fd74794ea..4ef096a9c 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_manager.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/camera_sample_frame_counts/camera_sample_frame_counts_manager.py
@@ -1,3 +1,5 @@
+"""Gets the video frame counts and timestamps for all epochs and inserts them
+into an NWB timeseries object"""
from rec_to_nwb.processing.nwb.components.video_files.camera_sample_frame_counts.camera_sample_frame_counts_builder import \
CameraSampleFrameCountsBuilder
from rec_to_nwb.processing.nwb.components.video_files.camera_sample_frame_counts.camera_sample_frame_counts_extractor import \
@@ -9,6 +11,8 @@ def __init__(self, raw_data_path):
self.extractor = CameraSampleFrameCountsExtractor(raw_data_path)
def get_timeseries(self):
+ """Gets the video frame counts and timestamps for all epochs and
+ inserts them into an NWB timeseries object"""
data = self.extractor.extract()
builder = CameraSampleFrameCountsBuilder(data)
- return builder.build()
\ No newline at end of file
+ return builder.build()
diff --git a/rec_to_nwb/processing/nwb/components/video_files/fl_video_file.py b/rec_to_nwb/processing/nwb/components/video_files/fl_video_file.py
index c9c972cdd..d01b08932 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/fl_video_file.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/fl_video_file.py
@@ -1,5 +1,4 @@
from numpy.core.multiarray import ndarray
-
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_builder.py b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_builder.py
index e42beb1ce..254e999d5 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_builder.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_builder.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_file import FlVideoFile
+from rec_to_nwb.processing.nwb.components.video_files.fl_video_file import \
+ FlVideoFile
class FlVideoFilesBuilder:
diff --git a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_extractor.py b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_extractor.py
index c31aedd03..82265c8ff 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_extractor.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_extractor.py
@@ -1,34 +1,145 @@
+"""Returns the name, timestamps and device for each video file"""
+import logging.config
+import os
+
import numpy as np
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
from rec_to_nwb.processing.tools.beartype.beartype import beartype
+path = os.path.dirname(os.path.abspath(__file__))
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir,
+ os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
+logger = logging.getLogger(__name__)
+
+NANOSECONDS_PER_SECOND = 1E9
+
class FlVideoFilesExtractor:
@beartype
- def __init__(self, raw_data_path: str, video_files_metadata: list):
+ def __init__(self,
+ raw_data_path: str,
+ video_files_metadata: list,
+ convert_timestamps: bool = True,
+ return_timestamps: bool = True):
self.raw_data_path = raw_data_path
self.video_files_metadata = video_files_metadata
+ self.convert_timestamps = convert_timestamps
+ self.return_timestamps = return_timestamps
def extract_video_files(self):
+ """Returns the name, timestamps and device for each video file"""
video_files = self.video_files_metadata
extracted_video_files = []
for video_file in video_files:
+ if self.return_timestamps:
+ timestamps = self._get_timestamps(video_file)
+ else:
+ timestamps = np.array([])
new_fl_video_file = {
"name": video_file["name"],
- "timestamps": self.convert_timestamps(readTrodesExtractedDataFile(
- self.raw_data_path + "/"
- + video_file["name"][:-4]
- + "videoTimeStamps.cameraHWSync"
- )["data"]),
+ "timestamps": timestamps,
"device": video_file["camera_id"]
}
extracted_video_files.append(new_fl_video_file)
return extracted_video_files
- def convert_timestamps(self, timestamps):
- converted_timestamps = np.ndarray(shape=np.shape(timestamps), dtype='float64')
- for i, record in enumerate(timestamps):
- converted_timestamps[i] = record[2]/1E9
- return converted_timestamps
+ def _get_timestamps(self, video_file):
+ """Retrieves the video timestamps.
+
+ Timestamps are in units of seconds, either relative to the start of
+ the recording (for old datasets) or since 1/1/1970 (when precision
+ time protocol (PTP) is used to synchronize the camera frames to
+ Trodes data packets).
+
+ Parameters
+ ----------
+ video_file : str
+
+ Returns
+ -------
+ timestamps : ndarray
+
+ """
+ try:
+ video_timestamps = self._read_video_timestamps_hw_sync(video_file)
+ logger.info('Loaded cameraHWSync timestamps for {}'.format(
+ video_file['name'][:-4]))
+ is_old_dataset = False
+ except FileNotFoundError:
+ # old dataset
+ video_timestamps = self._read_video_timestamps_hw_framecount(
+ video_file)
+ logger.info(
+ 'Loaded cameraHWFrameCount for {} (old dataset)'.format(
+ video_file['name'][:-4]))
+ is_old_dataset = True
+ if is_old_dataset or (not self.convert_timestamps):
+ # for now, force-disable timestamp conversion for old datasets
+ return video_timestamps
+ return self._convert_timestamps(video_timestamps)
+
+ def _read_video_timestamps_hw_sync(self, video_file):
+ """Returns video timestamps in unix time which are synchronized to the
+ Trodes data packets.
+
+ videoTimeStamps.cameraHWSync is a file extracted by the python package
+ `rec_to_binaries` from the .rec file. It only is extracted when using
+ precision time protocol (PTP) to synchronize the camera clock with
+ Trodes data packets. The HWTimestamp array in this file contains the
+ unix timestamps in nanoseconds since 1/1/1970.
+
+ Parameters
+ ----------
+ video_file : str
+
+ Returns
+ -------
+ unix_timestamps : ndarray
+
+ """
+ return readTrodesExtractedDataFile(
+ os.path.join(
+ self.raw_data_path,
+ os.path.splitext(video_file["name"])[0] +
+ ".videoTimeStamps.cameraHWSync")
+ )['data']['HWTimestamp']
+
+ def _read_video_timestamps_hw_framecount(self, video_file):
+ """Returns the index of video frames.
+
+ If PTP is not in use, only the videoTimeStamps.cameraHWFrameCount
+ file is generated by the `rec_to_binaries` package.
+
+ Parameters
+ ----------
+ video_file : str
+
+ Returns
+ -------
+ index : ndarray
+
+ """
+ return readTrodesExtractedDataFile(
+ os.path.join(
+ self.raw_data_path,
+ os.path.splitext(video_file["name"])[0] +
+ ".videoTimeStamps.cameraHWFrameCount")
+ )['data']['frameCount']
+
+ def _convert_timestamps(self, timestamps):
+ """Converts timestamps from nanoseconds to seconds
+
+ Parameters
+ ----------
+ timestamps : ndarray of int, in nanoseconds
+
+ Returns
+ -------
+ timestamps : ndarray of float, in seconds
+
+ """
+ return timestamps / NANOSECONDS_PER_SECOND
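
As a quick check of `_convert_timestamps`: PTP HWTimestamp values are integer nanoseconds since 1/1/1970, so dividing by `NANOSECONDS_PER_SECOND` yields float seconds. The sample values below are invented:

```python
import numpy as np

NANOSECONDS_PER_SECOND = 1E9

hw_timestamps = np.array([1563464245000000000, 1563464245033333333])
print(hw_timestamps / NANOSECONDS_PER_SECOND)
# float seconds, e.g. 1.563464245e+09
```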
diff --git a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_manager.py b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_manager.py
index e25b195c9..33ae1bb59 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_manager.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/fl_video_files_manager.py
@@ -1,19 +1,35 @@
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_builder import FlVideoFilesBuilder
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_extractor import FlVideoFilesExtractor
-from rec_to_nwb.processing.nwb.components.video_files.video_files_copy_maker import VideoFilesCopyMaker
+"""Gets the name, timestamps and device for each video file and inserts them
+into a FlVideoFile object. Also copies the video files to `video_path`"""
+from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_builder import \
+ FlVideoFilesBuilder
+from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_extractor import \
+ FlVideoFilesExtractor
+from rec_to_nwb.processing.nwb.components.video_files.video_files_copy_maker import \
+ VideoFilesCopyMaker
from rec_to_nwb.processing.tools.beartype.beartype import beartype
class FlVideoFilesManager:
@beartype
- def __init__(self, raw_data_path: str, video_path: str, video_files_metadata: list):
- self.video_files_copy_maker = VideoFilesCopyMaker([video_files['name'] for video_files in video_files_metadata])
+ def __init__(self,
+ raw_data_path: str,
+ video_path: str,
+ video_files_metadata: list,
+ convert_timestamps: bool = True,
+ return_timestamps: bool = True):
+ self.video_files_copy_maker = VideoFilesCopyMaker(
+ [video_files['name'] for video_files in video_files_metadata])
self.video_files_copy_maker.copy(raw_data_path, video_path)
- self.fl_video_files_extractor = FlVideoFilesExtractor(raw_data_path, video_files_metadata)
+ self.fl_video_files_extractor = FlVideoFilesExtractor(
+ raw_data_path, video_files_metadata,
+ convert_timestamps=convert_timestamps,
+ return_timestamps=return_timestamps)
self.fl_video_files_builder = FlVideoFilesBuilder()
def get_video_files(self):
+ """Gets the name, timestamps and device for each video file and
+ inserts them into a FlVideoFile object"""
extracted_video_files = self.fl_video_files_extractor.extract_video_files()
return [
self.fl_video_files_builder.build(
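
A hypothetical usage of the new constructor flags (the paths and metadata below are invented): `return_timestamps=False` reproduces the empty-timestamps behavior of the removed `OldFlVideoFilesManager`, and `convert_timestamps=False` keeps the raw nanosecond values:

```python
from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_manager import \
    FlVideoFilesManager

fl_video_files_manager = FlVideoFilesManager(
    raw_data_path='/data/beans/raw/20190718',   # invented path
    video_path='/data/beans/video',             # invented path
    video_files_metadata=[
        {'name': '20190718_beans_01_s1.1.h264', 'camera_id': 0}],
    convert_timestamps=True,   # divide PTP timestamps by 1e9
    return_timestamps=True)    # False -> empty timestamps, as in the
                               # removed OldFlVideoFilesManager
video_files = fl_video_files_manager.get_video_files()
```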
diff --git a/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_extractor.py b/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_extractor.py
deleted file mode 100644
index 945efeced..000000000
--- a/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_extractor.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import numpy as np
-from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-
-
-class OldFlVideoFilesExtractor:
-
- @beartype
- def __init__(self, raw_data_path: str, video_files_metadata: list):
- self.raw_data_path = raw_data_path
- self.video_files_metadata = video_files_metadata
-
- def extract_video_files(self):
- video_files = self.video_files_metadata
- extracted_video_files = []
- for video_file in video_files:
- new_fl_video_file = {
- "name": video_file["name"],
- "timestamps": [],
- "device": video_file["camera_id"]
- }
- extracted_video_files.append(new_fl_video_file)
- return extracted_video_files
diff --git a/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_manager.py b/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_manager.py
deleted file mode 100644
index 08b490968..000000000
--- a/rec_to_nwb/processing/nwb/components/video_files/old_fl_video_files_manager.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_builder import FlVideoFilesBuilder
-from rec_to_nwb.processing.nwb.components.video_files.fl_video_files_extractor import FlVideoFilesExtractor
-from rec_to_nwb.processing.nwb.components.video_files.old_fl_video_files_extractor import OldFlVideoFilesExtractor
-from rec_to_nwb.processing.nwb.components.video_files.video_files_copy_maker import VideoFilesCopyMaker
-from rec_to_nwb.processing.tools.beartype.beartype import beartype
-
-
-class OldFlVideoFilesManager:
-
- @beartype
- def __init__(self, raw_data_path: str, video_path: str, video_files_metadata: list):
- self.video_files_copy_maker = VideoFilesCopyMaker([video_files['name'] for video_files in video_files_metadata])
- self.video_files_copy_maker.copy(raw_data_path, video_path)
- self.old_fl_video_files_extractor = OldFlVideoFilesExtractor(raw_data_path, video_files_metadata)
- self.fl_video_files_builder = FlVideoFilesBuilder()
-
- def get_video_files(self):
- extracted_video_files = self.old_fl_video_files_extractor.extract_video_files()
- return [
- self.fl_video_files_builder.build(
- video_file["name"],
- video_file["timestamps"],
- video_file["device"]
- )
- for video_file in extracted_video_files
- ]
diff --git a/rec_to_nwb/processing/nwb/components/video_files/video_files_copy_maker.py b/rec_to_nwb/processing/nwb/components/video_files/video_files_copy_maker.py
index 5106c88a4..ea3be8182 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/video_files_copy_maker.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/video_files_copy_maker.py
@@ -1,7 +1,8 @@
import os
from shutil import copy as copy_file
-from rec_to_nwb.processing.exceptions.invalid_path_exception import InvalidPathException
+from rec_to_nwb.processing.exceptions.invalid_path_exception import \
+ InvalidPathException
class VideoFilesCopyMaker:
@@ -13,5 +14,4 @@ def copy(self, src, dst):
if not os.path.exists(dst):
raise InvalidPathException(dst + ' is not valid path')
for video_file in self.video_files_to_copy:
- copy_file(src + '/' + video_file, dst)
-
+ copy_file(os.path.join(src, video_file), dst)
diff --git a/rec_to_nwb/processing/nwb/components/video_files/video_files_creator.py b/rec_to_nwb/processing/nwb/components/video_files/video_files_creator.py
index 2f7c1645d..436fd52b0 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/video_files_creator.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/video_files_creator.py
@@ -1,15 +1,19 @@
-from ndx_franklab_novela.nwb_image_series import NwbImageSeries
+"""Creates a ImageSeries object that corresponds to the video"""
+import os
+
+from pynwb.image import ImageSeries
class VideoFilesCreator:
@staticmethod
def create(fl_video_file, video_directory, nwb_content):
- return NwbImageSeries(
- devices=[nwb_content.devices['camera_device ' + str(fl_video_file.device)]],
+ return ImageSeries(
+ device=nwb_content.devices['camera_device ' +
+ str(fl_video_file.device)],
name=fl_video_file.name,
timestamps=fl_video_file.timestamps,
- external_file=[video_directory + '/' + fl_video_file.name],
+ external_file=[os.path.join(video_directory, fl_video_file.name)],
format='external',
starting_frame=[0],
description='video of animal behavior from epoch'
diff --git a/rec_to_nwb/processing/nwb/components/video_files/video_files_injector.py b/rec_to_nwb/processing/nwb/components/video_files/video_files_injector.py
index 14217bae4..5d3188d69 100644
--- a/rec_to_nwb/processing/nwb/components/video_files/video_files_injector.py
+++ b/rec_to_nwb/processing/nwb/components/video_files/video_files_injector.py
@@ -1,6 +1,7 @@
+"""Inserts the ImageSeries object into an NWBFile under the path:
+processing/video_files"""
from pynwb import NWBFile
from pynwb.behavior import BehavioralEvents
-
from rec_to_nwb.processing.tools.beartype.beartype import beartype
diff --git a/rec_to_nwb/processing/time/continuous_time_extractor.py b/rec_to_nwb/processing/time/continuous_time_extractor.py
index 2a820965b..3b0502cfe 100644
--- a/rec_to_nwb/processing/time/continuous_time_extractor.py
+++ b/rec_to_nwb/processing/time/continuous_time_extractor.py
@@ -1,10 +1,13 @@
import logging.config
import os
+import numpy as np
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -20,3 +23,9 @@ def get_continuous_time_dict_file(file):
continuous_time = readTrodesExtractedDataFile(file)
return {str(data[0]): float(data[3]) for data in continuous_time['data']}
+ @staticmethod
+ def get_continuous_time_array_file(file):
+ logger.info('Reading continuous time array from: ' + str(file))
+ continuous_time = readTrodesExtractedDataFile(file)
+ return np.vstack((continuous_time['data']['trodestime'],
+ continuous_time['data']['adjusted_systime']))
diff --git a/rec_to_nwb/processing/time/timestamp_converter.py b/rec_to_nwb/processing/time/timestamp_converter.py
index 96213520e..c6cf7f0a2 100644
--- a/rec_to_nwb/processing/time/timestamp_converter.py
+++ b/rec_to_nwb/processing/time/timestamp_converter.py
@@ -5,20 +5,51 @@
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
+NANOSECONDS_PER_SECOND = 1E9
+
class TimestampConverter:
@staticmethod
- def convert_timestamps(continuous_time_dict, timestamps):
- converted_timestamps = np.ndarray(shape=[len(timestamps), ], dtype="float64")
- for i, timestamp in enumerate(timestamps):
- key = str(timestamp)
- value = continuous_time_dict.get(key, float('nan')) / 1E9
- if np.isnan(value):
- message = 'Following key: ' + str(key) + ' does not exist in continioustime dictionary!'
- logger.exception(message)
- converted_timestamps[i] = value
+ def convert_timestamps(continuous_times, timestamps):
+ """Matches the trodes timestamp index from the camera to the adjusted
+ timestamps (in unix time) from the ephys recording.
+
+ The adjusted timestamps are the ephys recording timestamps adjusted for
+ jitter from the arrival times of packets from the MCU.
+
+ Timestamps from the camera that do not have matching timestamps from
+ the ephys recording will be marked as NaN. This can happen when the
+ position tracking is shut off after the ephys recording is done or
+ started before the ephys recording starts.
+
+ Parameters
+ ----------
+ continuous_times: ndarray, shape (2, n_ephys_time)
+ From the continuous time file
+ row 0: trodestime, row 1: adjusted_systime
+ timestamps: ndarray, shape (n_position_time, )
+ trodes timestamps relative to camera’s timing (from pos_online.dat)
+
+ Returns
+ -------
+ converted_timestamps : ndarray, shape (n_position_time,)
+ Timestamps from the position tracking in terms of the adjusted
+ timestamps. Also converted to seconds.
+
+ """
+ # Find the matching timestamp index (trodestime)
+ timestamp_ind = np.digitize(timestamps, continuous_times[0, 1:])
+ converted_timestamps = (continuous_times[1, timestamp_ind] /
+ NANOSECONDS_PER_SECOND)
+
+ # Mark timestamps not found in continuous time as NaN
+ not_found = timestamps != continuous_times[0, timestamp_ind]
+ converted_timestamps[not_found] = np.nan
+
return converted_timestamps
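
A worked toy example of the matching logic above. The two-row `continuous_times` array mimics what `ContinuousTimeExtractor.get_continuous_time_array_file` returns (trodestime on row 0, adjusted_systime in nanoseconds on row 1); all values are invented:

```python
import numpy as np

NANOSECONDS_PER_SECOND = 1E9

# row 0: trodestime, row 1: adjusted_systime in nanoseconds
continuous_times = np.array([[10, 20, 30, 40],
                             [1.0e9, 2.0e9, 3.0e9, 4.0e9]])
timestamps = np.array([20, 25, 40])  # 25 has no ephys match

# Find the matching timestamp index (trodestime)
timestamp_ind = np.digitize(timestamps, continuous_times[0, 1:])
converted = continuous_times[1, timestamp_ind] / NANOSECONDS_PER_SECOND

# Mark timestamps not found in continuous time as NaN
converted[timestamps != continuous_times[0, timestamp_ind]] = np.nan
print(converted)  # [2. nan 4.]
```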
diff --git a/rec_to_nwb/processing/tools/data_scanner.py b/rec_to_nwb/processing/tools/data_scanner.py
index cbe744aca..d63a4870f 100644
--- a/rec_to_nwb/processing/tools/data_scanner.py
+++ b/rec_to_nwb/processing/tools/data_scanner.py
@@ -1,7 +1,8 @@
import fnmatch
import os
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
from rec_to_nwb.processing.tools.beartype.beartype import beartype
from rec_to_nwb.processing.tools.dataset import Dataset
@@ -21,37 +22,55 @@ def __init__(self, data_path: str, animal_name: str, nwb_metadata: MetadataManag
@beartype
def get_all_epochs(self, date: str) -> list:
- all_datasets = []
- directories = os.listdir(self.data_path + '/' + self.animal_name + '/preprocessing/' + date)
+ epoch_number_to_epoch = {}
+ directories = os.listdir(
+ os.path.join(
+ self.data_path, self.animal_name, 'preprocessing', date))
FileSorter.sort_filenames(directories)
for directory in directories:
if directory.startswith(date):
- dataset_name = (directory.split('_')[2] + '_' + directory.split('_')[3]).split('.')[0]
- if not dataset_name in all_datasets:
- all_datasets.append(dataset_name)
- return all_datasets
+ epoch_number = directory.split('_')[2]
+ epoch_tag = directory.split('_')[3].split('.')[0]
+ epoch = f'{epoch_number}_{epoch_tag}'
+
+ if epoch_number in epoch_number_to_epoch:
+ # check if the current epoch_tag is consistent
+ warning = f'epoch {epoch_number} is not consistent across files'
+ assert epoch_number_to_epoch[epoch_number] == epoch, warning
+ else:
+ epoch_number_to_epoch[epoch_number] = epoch
+
+ return [epoch_number_to_epoch[epoch_number]
+ for epoch_number in sorted(epoch_number_to_epoch)]
@beartype
def get_all_data_from_dataset(self, date: str) -> list:
- self.__check_if_path_exists(self.data_path + '/' + self.animal_name + '/preprocessing/' + date)
- return os.listdir(self.data_path + '/' + self.animal_name + '/preprocessing/' + date)
+ path = os.path.join(self.data_path, self.animal_name, 'preprocessing',
+ date)
+ self.__check_if_path_exists(path)
+ return os.listdir(path)
@beartype
def extract_data_from_date_folder(self, date: str):
- self.data = {self.animal_name: self.__extract_experiments(self.data_path, self.animal_name, [date])}
+ self.data = {self.animal_name: self.__extract_experiments(
+ self.data_path, self.animal_name, [date])}
@beartype
def extract_data_from_dates_folders(self, dates: list):
- self.data = {self.animal_name: self.__extract_experiments(self.data_path, self.animal_name, dates)}
+ self.data = {self.animal_name: self.__extract_experiments(
+ self.data_path, self.animal_name, dates)}
def extract_data_from_all_dates_folders(self):
- self.data = {self.animal_name: self.__extract_experiments(self.data_path, self.animal_name, None)}
+ self.data = {self.animal_name: self.__extract_experiments(
+ self.data_path, self.animal_name, None)}
def __extract_experiments(self, data_path, animal_name, dates):
- preprocessing_path = data_path + animal_name + '/preprocessing'
+ preprocessing_path = os.path.join(
+ data_path, animal_name, 'preprocessing')
if not dates:
dates = FileSorter.sort_filenames(os.listdir(preprocessing_path))
- return {date: self.__extract_datasets(preprocessing_path + '/' + date) for date in dates}
+ return {date: self.__extract_datasets(
+ os.path.join(preprocessing_path, date)) for date in dates}
@staticmethod
def __extract_datasets(date_path):
@@ -69,7 +88,9 @@ def __extract_datasets(date_path):
existing_datasets.add(dataset_name)
for dataset in datasets.values():
if dataset_name == dataset.name:
- dataset.add_data_to_dataset(date_path + '/' + directory + '/', dir_last_part.pop())
+ dataset.add_data_to_dataset(
+ os.path.join(date_path, directory),
+ dir_last_part.pop())
return datasets
@beartype
@@ -88,8 +109,9 @@ def get_all_datasets(self, animal: str, date: str) -> list:
def get_mda_timestamps(self, animal: str, date: str, dataset: str):
for file in self.data[animal][date][dataset].get_all_data_from_dataset('mda'):
if file.endswith('timestamps.mda'):
- return self.data[animal][date][dataset].get_data_path_from_dataset('mda') + file
- return None
+ return os.path.join(
+ self.data[animal][date][dataset]
+ .get_data_path_from_dataset('mda'), file)
@staticmethod
@beartype
@@ -98,9 +120,10 @@ def get_probes_from_directory(path: str):
files = FileSorter.sort_filenames(os.listdir(path))
for probe_file in files:
if fnmatch.fnmatch(probe_file, "probe*.yml"):
- probes.append(path + '/' + probe_file)
+ probes.append(os.path.join(path, probe_file))
return probes
def __check_if_path_exists(self, path):
- if not (os.path.exists(path)):
- raise MissingDataException('missing ' + self.data_path + ' directory')
+ if not os.path.exists(path):
+ raise MissingDataException(
+ 'missing ' + path + ' directory')
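
To illustrate the epoch parsing in `get_all_epochs`, here is the splitting the code assumes; the directory name is invented but follows the date_animal_epoch_tag convention:

```python
directory = '20190718_beans_01_s1.mda'  # hypothetical preprocessing dir
epoch_number = directory.split('_')[2]             # '01'
epoch_tag = directory.split('_')[3].split('.')[0]  # 's1'
print(f'{epoch_number}_{epoch_tag}')               # 01_s1
```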
diff --git a/rec_to_nwb/processing/tools/dataset.py b/rec_to_nwb/processing/tools/dataset.py
index 690db7bed..0451d7d52 100644
--- a/rec_to_nwb/processing/tools/dataset.py
+++ b/rec_to_nwb/processing/tools/dataset.py
@@ -22,11 +22,18 @@ def get_all_data_from_dataset(self, data_type):
def get_mda_timestamps(self):
for file in self.get_all_data_from_dataset('mda'):
if file.endswith('timestamps.mda'):
- return self.get_data_path_from_dataset('mda') + file
+ return os.path.join(
+ self.get_data_path_from_dataset('mda'), file)
+
+ for file in self.get_all_data_from_dataset('mountainsort'):
+ if file.endswith('timestamps.mda'):
+ return os.path.join(
+ self.get_data_path_from_dataset('mountainsort'), file)
return None
def get_continuous_time(self):
for file in self.get_all_data_from_dataset('time'):
if file.endswith('continuoustime.dat'):
- return self.get_data_path_from_dataset('time') + file
- return None
\ No newline at end of file
+ return os.path.join(
+ self.get_data_path_from_dataset('time'), file)
+ return None
diff --git a/rec_to_nwb/processing/tools/file_downloader.py b/rec_to_nwb/processing/tools/file_downloader.py
index 7187714f9..74d8f7695 100644
--- a/rec_to_nwb/processing/tools/file_downloader.py
+++ b/rec_to_nwb/processing/tools/file_downloader.py
@@ -7,12 +7,14 @@
import os
import requests
-
-from rec_to_nwb.processing.tools.abstract_file_downloader import AbstractFileDownloader
+from rec_to_nwb.processing.tools.abstract_file_downloader import \
+ AbstractFileDownloader
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
diff --git a/rec_to_nwb/processing/tools/file_sorter.py b/rec_to_nwb/processing/tools/file_sorter.py
index fd9f1b8f0..cfc6ea775 100644
--- a/rec_to_nwb/processing/tools/file_sorter.py
+++ b/rec_to_nwb/processing/tools/file_sorter.py
@@ -1,6 +1,15 @@
-class FileSorter:
+class FileSorter:
@staticmethod
def sort_filenames(filenames):
- filenames.sort(key=lambda item: (item, len(item)))
+ # A log file can sit alongside mda files, so check whether any file is an mda file
+ filenames_mda = [i for i in filenames if '.mda' in i]
+ if len(filenames_mda) > 0: # if at least one mda file
+ # sort by length first because the ntrode numbers are 1,2,.., 10, ...
+ filenames.sort(key=lambda item: (len(item), item))
+ else:
+ filenames.sort(key=lambda item: (item, len(item)))
return filenames
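
A quick demonstration of why mda filenames get the length-first key: plain lexicographic order interleaves ntrode 10 between 1 and 2, while sorting by length first restores numeric order (filenames invented):

```python
filenames = ['beans.nt1.mda', 'beans.nt2.mda', 'beans.nt10.mda']

# Lexicographic sort puts nt10 before nt2 ...
print(sorted(filenames, key=lambda item: (item, len(item))))
# ['beans.nt1.mda', 'beans.nt10.mda', 'beans.nt2.mda']

# ... while the length-first key restores the 1, 2, ..., 10 order.
print(sorted(filenames, key=lambda item: (len(item), item)))
# ['beans.nt1.mda', 'beans.nt2.mda', 'beans.nt10.mda']
```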
diff --git a/rec_to_nwb/processing/tools/filter_probe_by_type.py b/rec_to_nwb/processing/tools/filter_probe_by_type.py
index d310b08f8..470c7846f 100644
--- a/rec_to_nwb/processing/tools/filter_probe_by_type.py
+++ b/rec_to_nwb/processing/tools/filter_probe_by_type.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.exceptions.invalid_metadata_exception import InvalidMetadataException
+from rec_to_nwb.processing.exceptions.invalid_metadata_exception import \
+ InvalidMetadataException
def filter_probe_by_type(probes_content, device_type):
@@ -6,5 +7,6 @@ def filter_probe_by_type(probes_content, device_type):
if probe_metadata['probe_type'] == device_type:
return probe_metadata
if device_type not in [probe_metadata['probe_type'] for probe_metadata in probes_content]:
- raise InvalidMetadataException('there is not matching device type for metadata electrode_group in probe.yml schemas: ' + str(device_type))
+ raise InvalidMetadataException(
+ 'there is no matching device type for metadata electrode_group in probe.yml schemas: ' + str(device_type))
return None
diff --git a/rec_to_nwb/processing/tools/validate_parameters.py b/rec_to_nwb/processing/tools/validate_parameters.py
index acf848549..511ae255d 100644
--- a/rec_to_nwb/processing/tools/validate_parameters.py
+++ b/rec_to_nwb/processing/tools/validate_parameters.py
@@ -1,5 +1,7 @@
-from rec_to_nwb.processing.exceptions.not_equal_param_length_exception import NotEqualParamLengthException
-from rec_to_nwb.processing.exceptions.none_param_exception import NoneParamException
+from rec_to_nwb.processing.exceptions.none_param_exception import \
+ NoneParamException
+from rec_to_nwb.processing.exceptions.not_equal_param_length_exception import \
+ NotEqualParamLengthException
def validate_parameters_not_none(class_name, *args):
@@ -12,5 +14,6 @@ def validate_parameters_equal_length(class_name, *args):
previous_arg = args[0]
for arg in args:
if len(arg) != len(previous_arg):
- raise NotEqualParamLengthException('Parameters lengths are not equal in ' + class_name)
+ raise NotEqualParamLengthException(
+ 'Parameters lengths are not equal in ' + class_name)
previous_arg = arg
diff --git a/rec_to_nwb/processing/validation/associated_files_validation.py b/rec_to_nwb/processing/validation/associated_files_validation.py
index 00f1c7cec..cdb16d5aa 100644
--- a/rec_to_nwb/processing/validation/associated_files_validation.py
+++ b/rec_to_nwb/processing/validation/associated_files_validation.py
@@ -1,11 +1,14 @@
import logging.config
import os
-from rec_to_nwb.processing.validation.associated_files_validation_summary import AssociatedFilesValidationSummary
+from rec_to_nwb.processing.validation.associated_files_validation_summary import \
+ AssociatedFilesValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -16,5 +19,6 @@ def __init__(self, associated_files):
def create_summary(self):
if len(self.associated_files) == 0:
- logger.info("There are no associated_files defined in metadata.yml file.")
+ logger.info(
+ "There are no associated_files defined in metadata.yml file.")
return AssociatedFilesValidationSummary(self.associated_files)
diff --git a/rec_to_nwb/processing/validation/associated_files_validation_summary.py b/rec_to_nwb/processing/validation/associated_files_validation_summary.py
index a27f0c6af..0c17d6cfd 100644
--- a/rec_to_nwb/processing/validation/associated_files_validation_summary.py
+++ b/rec_to_nwb/processing/validation/associated_files_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class AssociatedFilesValidationSummary(ValidationSummary):
@@ -8,4 +9,3 @@ def __init__(self, associated_files):
def is_valid(self):
return isinstance(self.associated_files, list)
-
diff --git a/rec_to_nwb/processing/validation/associated_files_validator.py b/rec_to_nwb/processing/validation/associated_files_validator.py
index 8bbbf1a62..3f766949d 100644
--- a/rec_to_nwb/processing/validation/associated_files_validator.py
+++ b/rec_to_nwb/processing/validation/associated_files_validator.py
@@ -8,6 +8,7 @@ def __init__(self, associated_files):
def files_exist(self):
for associated_file in self.associated_files:
+ print(f'Checking associated file {associated_file["path"]}')
if not os.path.isfile(associated_file["path"]):
return False
return True
diff --git a/rec_to_nwb/processing/validation/metadata_section_validator.py b/rec_to_nwb/processing/validation/metadata_section_validator.py
index 4d279ce24..d687b3719 100644
--- a/rec_to_nwb/processing/validation/metadata_section_validator.py
+++ b/rec_to_nwb/processing/validation/metadata_section_validator.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
class MetadataSectionValidator:
@@ -7,24 +8,26 @@ def __init__(self, metadata):
self.metadata = metadata
def validate_sections(self):
- if 'experimenter name' not in self.metadata:
- raise MissingDataException('metadata is missing experimenter name')
+ if 'experimenter_name' not in self.metadata:
+ raise MissingDataException('metadata is missing experimenter_name')
if 'lab' not in self.metadata:
raise MissingDataException('metadata is missing lab')
if 'institution' not in self.metadata:
raise MissingDataException('metadata is missing institution')
- if 'experiment description' not in self.metadata:
- raise MissingDataException('metadata is missing experiment description')
- if 'session description' not in self.metadata:
- raise MissingDataException('metadata is missing session description')
+ if 'experiment_description' not in self.metadata:
+ raise MissingDataException(
+ 'metadata is missing experiment_description')
+ if 'session_description' not in self.metadata:
+ raise MissingDataException(
+ 'metadata is missing session_description')
if 'session_id' not in self.metadata:
raise MissingDataException('metadata is missing session_id')
if 'subject' not in self.metadata:
raise MissingDataException('metadata is missing subject')
if 'units' not in self.metadata:
raise MissingDataException('metadata is missing units')
- if 'data acq device' not in self.metadata:
- raise MissingDataException('metadata is missing data acq device')
+ if 'data_acq_device' not in self.metadata:
+ raise MissingDataException('metadata is missing data_acq_device')
if 'cameras' not in self.metadata:
raise MissingDataException('metadata is missing cameras')
if 'tasks' not in self.metadata:
@@ -32,12 +35,15 @@ def validate_sections(self):
if 'associated_files' not in self.metadata:
raise MissingDataException('metadata is missing associated_files')
if 'associated_video_files' not in self.metadata:
- raise MissingDataException('metadata is missing associated_video_files')
+ raise MissingDataException(
+ 'metadata is missing associated_video_files')
if 'times_period_multiplier' not in self.metadata:
- raise MissingDataException('metadata is missing times_period_multiplier')
+ raise MissingDataException(
+ 'metadata is missing times_period_multiplier')
if 'behavioral_events' not in self.metadata:
raise MissingDataException('metadata is missing behavioral_events')
- if 'electrode groups' not in self.metadata:
- raise MissingDataException('metadata is missing electrode groups')
- if 'ntrode electrode group channel map' not in self.metadata:
- raise MissingDataException('metadata is missing ntrode electrode group channel map')
+ if 'electrode_groups' not in self.metadata:
+ raise MissingDataException('metadata is missing electrode_groups')
+ if 'ntrode_electrode_group_channel_map' not in self.metadata:
+ raise MissingDataException(
+ 'metadata is missing ntrode_electrode_group_channel_map')
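
For reference, a skeleton of the renamed top-level keys that `validate_sections` now requires, with underscores replacing the old spaces (values elided):

```python
# Required top-level metadata.yml keys after the rename.
required_metadata_keys = [
    'experimenter_name', 'lab', 'institution', 'experiment_description',
    'session_description', 'session_id', 'subject', 'units',
    'data_acq_device', 'cameras', 'tasks', 'associated_files',
    'associated_video_files', 'times_period_multiplier',
    'behavioral_events', 'electrode_groups',
    'ntrode_electrode_group_channel_map',
]
```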
diff --git a/rec_to_nwb/processing/validation/metadata_validation_summary.py b/rec_to_nwb/processing/validation/metadata_validation_summary.py
index b457b1331..0cea97d4e 100644
--- a/rec_to_nwb/processing/validation/metadata_validation_summary.py
+++ b/rec_to_nwb/processing/validation/metadata_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class MetadataValidationSummary(ValidationSummary):
diff --git a/rec_to_nwb/processing/validation/metadata_validator.py b/rec_to_nwb/processing/validation/metadata_validator.py
index d72d825e6..d002f1b78 100644
--- a/rec_to_nwb/processing/validation/metadata_validator.py
+++ b/rec_to_nwb/processing/validation/metadata_validator.py
@@ -1,7 +1,9 @@
import os
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.validation.metadata_validation_summary import MetadataValidationSummary
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.validation.metadata_validation_summary import \
+ MetadataValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
@@ -15,7 +17,7 @@ class MetadataValidator(Validator):
Methods:
get_missing_metadata()
"""
-
+
def __init__(self, metadata_path, probes_paths):
self.probes_paths = probes_paths
self.metadata_path = metadata_path
@@ -35,7 +37,7 @@ def __get_missing_metadata(self):
Returns:
list of strings: list of all missing yml files
"""
-
+
missing_data = []
if not (os.path.exists(self.metadata_path)):
missing_data.append(self.metadata_path)
diff --git a/rec_to_nwb/processing/validation/not_empty_validation_summary.py b/rec_to_nwb/processing/validation/not_empty_validation_summary.py
index 5a5b339db..1ee80cedb 100644
--- a/rec_to_nwb/processing/validation/not_empty_validation_summary.py
+++ b/rec_to_nwb/processing/validation/not_empty_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class NotEmptyValidationSummary(ValidationSummary):
@@ -8,4 +9,3 @@ def __init__(self, parameter):
def is_valid(self):
return bool(self.parameter)
-
diff --git a/rec_to_nwb/processing/validation/not_empty_validator.py b/rec_to_nwb/processing/validation/not_empty_validator.py
index fdc504d8b..1a21e6e71 100644
--- a/rec_to_nwb/processing/validation/not_empty_validator.py
+++ b/rec_to_nwb/processing/validation/not_empty_validator.py
@@ -1,5 +1,7 @@
-from rec_to_nwb.processing.exceptions.none_param_exception import NoneParamException
-from rec_to_nwb.processing.validation.not_empty_validation_summary import NotEmptyValidationSummary
+from rec_to_nwb.processing.exceptions.none_param_exception import \
+ NoneParamException
+from rec_to_nwb.processing.validation.not_empty_validation_summary import \
+ NotEmptyValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
diff --git a/rec_to_nwb/processing/validation/ntrode_validation_summary.py b/rec_to_nwb/processing/validation/ntrode_validation_summary.py
index 95a2b249e..375d6d01b 100644
--- a/rec_to_nwb/processing/validation/ntrode_validation_summary.py
+++ b/rec_to_nwb/processing/validation/ntrode_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class NTrodeValidationSummary(ValidationSummary):
@@ -8,4 +9,6 @@ def __init__(self, ntrodes_num, spike_ntrodes_num):
self.spike_ntrodes_num = spike_ntrodes_num
def is_valid(self):
- return self.ntrodes_num > 0 and self.spike_ntrodes_num > 0 and self.ntrodes_num == self.spike_ntrodes_num
+ return (self.ntrodes_num > 0 and
+ self.spike_ntrodes_num > 0 and
+ self.ntrodes_num == self.spike_ntrodes_num)
diff --git a/rec_to_nwb/processing/validation/ntrode_validator.py b/rec_to_nwb/processing/validation/ntrode_validator.py
index f7f6f5ec9..4efeb901b 100644
--- a/rec_to_nwb/processing/validation/ntrode_validator.py
+++ b/rec_to_nwb/processing/validation/ntrode_validator.py
@@ -1,19 +1,27 @@
import logging.config
import os
-from rec_to_nwb.processing.exceptions.invalid_header_exception import InvalidHeaderException
-from rec_to_nwb.processing.exceptions.invalid_metadata_exception import InvalidMetadataException
+from rec_to_nwb.processing.exceptions.invalid_header_exception import \
+ InvalidHeaderException
+from rec_to_nwb.processing.exceptions.invalid_metadata_exception import \
+ InvalidMetadataException
from rec_to_nwb.processing.header.module.header import Header
from rec_to_nwb.processing.tools.beartype.beartype import beartype
-from rec_to_nwb.processing.tools.count_electrodes_in_ntrode import count_electrodes_in_ntrode
-from rec_to_nwb.processing.tools.count_electrodes_in_probe import count_electrodes_in_probe
-from rec_to_nwb.processing.tools.filter_probe_by_type import filter_probe_by_type
-from rec_to_nwb.processing.validation.ntrode_validation_summary import NTrodeValidationSummary
+from rec_to_nwb.processing.tools.count_electrodes_in_ntrode import \
+ count_electrodes_in_ntrode
+from rec_to_nwb.processing.tools.count_electrodes_in_probe import \
+ count_electrodes_in_probe
+from rec_to_nwb.processing.tools.filter_probe_by_type import \
+ filter_probe_by_type
+from rec_to_nwb.processing.validation.ntrode_validation_summary import \
+ NTrodeValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
path = os.path.dirname(os.path.abspath(__file__))
-logging.config.fileConfig(fname=str(path) + '/../../logging.conf', disable_existing_loggers=False)
+logging.config.fileConfig(
+ fname=os.path.join(str(path), os.pardir, os.pardir, 'logging.conf'),
+ disable_existing_loggers=False)
logger = logging.getLogger(__name__)
@@ -26,28 +34,32 @@ def __init__(self, metadata: dict, header: Header, probes_metadata: list):
self.probes_metadata = probes_metadata
def create_summary(self):
- ntrodes = self.metadata['ntrode electrode group channel map']
+ ntrodes = self.metadata['ntrode_electrode_group_channel_map']
if len(ntrodes) == 0:
- raise InvalidMetadataException("There are no ntrodes defined in metadata.yml file.")
+ raise InvalidMetadataException(
+ "There are no ntrodes defined in metadata.yml file.")
if self.header is None or \
self.header.configuration.spike_configuration is None or \
self.header.configuration.spike_configuration.spike_n_trodes is None:
- raise InvalidHeaderException("Rec header does not contain spike_n_trodes data")
+ raise InvalidHeaderException(
+ "Rec header does not contain spike_n_trodes data")
spike_ntrodes = self.header.configuration.spike_configuration.spike_n_trodes
ntrodes_num = len(ntrodes)
spike_ntrodes_num = len(spike_ntrodes)
- self.validate_ntrode_metadata_with_probe_metadata(self.metadata, self.probes_metadata)
+ self.validate_ntrode_metadata_with_probe_metadata(
+ self.metadata, self.probes_metadata)
return NTrodeValidationSummary(ntrodes_num, spike_ntrodes_num)
@staticmethod
def validate_ntrode_metadata_with_probe_metadata(metadata, probes_metadata):
- for electrode_group in metadata['electrode groups']:
- probe_metadata = filter_probe_by_type(probes_metadata, electrode_group['device_type'])
+ for electrode_group in metadata['electrode_groups']:
+ probe_metadata = filter_probe_by_type(
+ probes_metadata, electrode_group['device_type'])
electrodes_in_probe = count_electrodes_in_probe(probe_metadata)
electrodes_in_group = count_electrodes_in_ntrode(
- metadata['ntrode electrode group channel map'],
+ metadata['ntrode_electrode_group_channel_map'],
electrode_group['id']
)
if electrodes_in_probe != electrodes_in_group:
@@ -56,5 +68,6 @@ def validate_ntrode_metadata_with_probe_metadata(metadata, probes_metadata):
'Probe_type: ' + str(electrode_group['device_type']) +
' electrodes in this probe_type: ' + str(electrodes_in_probe) +
'. Ntrode_metadata for electrode_group of id: ' + str(electrode_group['id']) +
- ' electrodes in this electrode_group: ' + str(electrodes_in_group)
+ ' electrodes in this electrode_group: ' +
+ str(electrodes_in_group)
)
diff --git a/rec_to_nwb/processing/validation/path_validation_summary.py b/rec_to_nwb/processing/validation/path_validation_summary.py
index d242dd43d..8bc207b7a 100644
--- a/rec_to_nwb/processing/validation/path_validation_summary.py
+++ b/rec_to_nwb/processing/validation/path_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class PathValidationSummary(ValidationSummary):
diff --git a/rec_to_nwb/processing/validation/path_validator.py b/rec_to_nwb/processing/validation/path_validator.py
index 31357bea9..7def661d8 100644
--- a/rec_to_nwb/processing/validation/path_validator.py
+++ b/rec_to_nwb/processing/validation/path_validator.py
@@ -1,6 +1,7 @@
import os
-from rec_to_nwb.processing.validation.path_validation_summary import PathValidationSummary
+from rec_to_nwb.processing.validation.path_validation_summary import \
+ PathValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
@@ -12,4 +13,3 @@ def create_summary(self):
if not os.path.isdir(self.path):
raise NotADirectoryError(self.path + ' is not a directory')
return PathValidationSummary()
-
diff --git a/rec_to_nwb/processing/validation/preprocessing_validation_summary.py b/rec_to_nwb/processing/validation/preprocessing_validation_summary.py
index d03ac07f4..ca62dbdee 100644
--- a/rec_to_nwb/processing/validation/preprocessing_validation_summary.py
+++ b/rec_to_nwb/processing/validation/preprocessing_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class PreprocessingValidationSummary(ValidationSummary):
diff --git a/rec_to_nwb/processing/validation/preprocessing_validator.py b/rec_to_nwb/processing/validation/preprocessing_validator.py
index dbfc024fc..f24b6e4ee 100644
--- a/rec_to_nwb/processing/validation/preprocessing_validator.py
+++ b/rec_to_nwb/processing/validation/preprocessing_validator.py
@@ -1,7 +1,9 @@
import os
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.validation.preprocessing_validation_summary import PreprocessingValidationSummary
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.validation.preprocessing_validation_summary import \
+ PreprocessingValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
@@ -33,7 +35,8 @@ def create_summary(self):
message = ''
if missing_preprocessing_data:
for missing_preprocessing_file in missing_preprocessing_data:
- message += missing_preprocessing_file[0] + ' from epoch ' + missing_preprocessing_file[1] + '\n'
+ message += missing_preprocessing_file[0] + \
+ ' from epoch ' + missing_preprocessing_file[1] + '\n'
raise MissingDataException(message + "are missing")
return PreprocessingValidationSummary(missing_preprocessing_data)
diff --git a/rec_to_nwb/processing/validation/task_validation_summary.py b/rec_to_nwb/processing/validation/task_validation_summary.py
index 900fa9fd2..9dc53006f 100644
--- a/rec_to_nwb/processing/validation/task_validation_summary.py
+++ b/rec_to_nwb/processing/validation/task_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class TaskValidationSummary(ValidationSummary):
@@ -8,4 +9,3 @@ def __init__(self, tasks):
def is_valid(self):
return isinstance(self.tasks, list)
-
diff --git a/rec_to_nwb/processing/validation/task_validator.py b/rec_to_nwb/processing/validation/task_validator.py
index 36638d569..e9aaf7aea 100644
--- a/rec_to_nwb/processing/validation/task_validator.py
+++ b/rec_to_nwb/processing/validation/task_validator.py
@@ -1,5 +1,7 @@
-from rec_to_nwb.processing.exceptions.invalid_metadata_exception import InvalidMetadataException
-from rec_to_nwb.processing.validation.task_validation_summary import TaskValidationSummary
+from rec_to_nwb.processing.exceptions.invalid_metadata_exception import \
+ InvalidMetadataException
+from rec_to_nwb.processing.validation.task_validation_summary import \
+ TaskValidationSummary
from rec_to_nwb.processing.validation.validator import Validator
@@ -10,5 +12,6 @@ def __init__(self, tasks):
def create_summary(self):
if len(self.tasks) == 0:
- raise InvalidMetadataException("There are no tasks defined in metadata.yml file.")
+ raise InvalidMetadataException(
+ "There are no tasks defined in metadata.yml file.")
return TaskValidationSummary(self.tasks)
diff --git a/rec_to_nwb/processing/validation/validation_registrator.py b/rec_to_nwb/processing/validation/validation_registrator.py
index e61778760..5e8aaf00c 100644
--- a/rec_to_nwb/processing/validation/validation_registrator.py
+++ b/rec_to_nwb/processing/validation/validation_registrator.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.exceptions.invalid_input_exception import InvalidInputException
+from rec_to_nwb.processing.exceptions.invalid_input_exception import \
+ InvalidInputException
from rec_to_nwb.processing.validation.validator import Validator
@@ -15,4 +16,5 @@ def validate(self):
for validator in self.validators:
result = validator.create_summary()
if not result.is_valid:
- raise InvalidInputException("Validation: " + str(type(validator)) + "has failed!")
+ raise InvalidInputException(
+ "Validation: " + str(type(validator)) + "has failed!")
diff --git a/rec_to_nwb/processing/validation/xml_files_validation.py b/rec_to_nwb/processing/validation/xml_files_validation.py
index 4e2052f5b..97510150d 100644
--- a/rec_to_nwb/processing/validation/xml_files_validation.py
+++ b/rec_to_nwb/processing/validation/xml_files_validation.py
@@ -1,8 +1,11 @@
import os
-from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
-from rec_to_nwb.processing.validation.validation_registrator import ValidationRegistrator
-from rec_to_nwb.processing.validation.xml_files_validation_summary import XmlFilesValidationSummary
+from rec_to_nwb.processing.exceptions.missing_data_exception import \
+ MissingDataException
+from rec_to_nwb.processing.validation.validation_registrator import \
+ ValidationRegistrator
+from rec_to_nwb.processing.validation.xml_files_validation_summary import \
+ XmlFilesValidationSummary
class XmlFilesValidator(ValidationRegistrator):
@@ -11,5 +14,6 @@ def __init__(self, path):
def create_summary(self):
if not os.path.exists(self.path):
- raise MissingDataException('xml file ' + self.path + ' does not exist!')
+ raise MissingDataException(
+ 'xml file ' + self.path + ' does not exist!')
return XmlFilesValidationSummary()
diff --git a/rec_to_nwb/processing/validation/xml_files_validation_summary.py b/rec_to_nwb/processing/validation/xml_files_validation_summary.py
index b65b87b9f..3483d47da 100644
--- a/rec_to_nwb/processing/validation/xml_files_validation_summary.py
+++ b/rec_to_nwb/processing/validation/xml_files_validation_summary.py
@@ -1,4 +1,5 @@
-from rec_to_nwb.processing.validation.validation_summary import ValidationSummary
+from rec_to_nwb.processing.validation.validation_summary import \
+ ValidationSummary
class XmlFilesValidationSummary(ValidationSummary):
diff --git a/rec_to_nwb/scripts/nwb_eseries_timestamps_visualization.py b/rec_to_nwb/scripts/nwb_eseries_timestamps_visualization.py
index d2242d1ba..a189d4167 100644
--- a/rec_to_nwb/scripts/nwb_eseries_timestamps_visualization.py
+++ b/rec_to_nwb/scripts/nwb_eseries_timestamps_visualization.py
@@ -1,13 +1,8 @@
# plots timestamps from nwb file(e-series)
-
-from ndx_franklab_novela.apparatus import Apparatus, Edge, Node
-from ndx_franklab_novela.header_device import HeaderDevice
-from ndx_franklab_novela.ntrode import NTrode
-
import matplotlib.pyplot as plt
from pynwb import NWBHDF5IO
-nwb_file = NWBHDF5IO('rec_to_nwb/rec_to_nwb/test/beans20190718.nwb', 'r')
+nwb_file = NWBHDF5IO('rec_to_nwb/rec_to_nwb/test/beans20190718.nwb', 'r', load_namespaces=True)
nwbfile_read = nwb_file.read()
timestamp = nwbfile_read.acquisition['e-series'].timestamps
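Passing `load_namespaces=True` tells pynwb to load the extension namespaces cached inside the file (here, the `ndx_franklab_novela` types), which is why the explicit extension imports could be dropped above. A small hedged reader, with an illustrative path:

```python
# Read an NWB file whose extension namespaces are cached in the file itself;
# the filename is illustrative.
from pynwb import NWBHDF5IO

with NWBHDF5IO('beans20190718.nwb', 'r', load_namespaces=True) as io:
    nwbfile = io.read()
    timestamps = nwbfile.acquisition['e-series'].timestamps
    print(timestamps[:10])
```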
diff --git a/rec_to_nwb/scripts/visualise_timestamp_distances.py b/rec_to_nwb/scripts/visualise_timestamp_distances.py
index b8b30d229..ffe0452be 100644
--- a/rec_to_nwb/scripts/visualise_timestamp_distances.py
+++ b/rec_to_nwb/scripts/visualise_timestamp_distances.py
@@ -1,16 +1,16 @@
+import os
from pathlib import Path
+import pandas as pd
+from matplotlib import pyplot
from mountainlab_pytools.mdaio import readmda
from rec_to_binaries.read_binaries import readTrodesExtractedDataFile
-
from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager
-from rec_to_nwb.processing.time.continuous_time_extractor import ContinuousTimeExtractor
+from rec_to_nwb.processing.time.continuous_time_extractor import \
+ ContinuousTimeExtractor
from rec_to_nwb.processing.time.timestamp_converter import TimestampConverter
from rec_to_nwb.processing.tools.data_scanner import DataScanner
-from matplotlib import pyplot
-import pandas as pd
-
path = Path(__file__).parent.parent
path.resolve()
@@ -29,14 +29,16 @@ def get_posonline_data_file(dataset):
all_pos = dataset.get_all_data_from_dataset('pos')
for pos_file in all_pos:
if pos_file.endswith('pos_online.dat'):
- return dataset.get_data_path_from_dataset('pos') + pos_file
+ return os.path.join(dataset.get_data_path_from_dataset('pos'),
+ pos_file)
return None
def extract_datasets(data_scanner, animal_name, date):
data_scanner.extract_data_from_date_folder(date)
dataset_names = data_scanner.get_all_epochs(date)
- return[data_scanner.data[animal_name][date][dataset] for dataset in dataset_names]
+ return [data_scanner.data[animal_name][date][dataset]
+ for dataset in dataset_names]
if __name__ == "__main__":
@@ -53,21 +55,28 @@ def extract_datasets(data_scanner, animal_name, date):
data_scanner = DataScanner(data_path, animal_name, nwb_metadata)
datasets = extract_datasets(data_scanner, animal_name, date)
- pos_timestamps_files = [get_posonline_data_file(dataset) for dataset in datasets]
- mda_timestamps_files = [dataset.get_mda_timestamps() for dataset in datasets]
- continuous_time_files = [dataset.get_continuous_time() for dataset in datasets]
+ pos_timestamps_files = [get_posonline_data_file(
+ dataset) for dataset in datasets]
+ mda_timestamps_files = [dataset.get_mda_timestamps()
+ for dataset in datasets]
+ continuous_time_files = [dataset.get_continuous_time()
+ for dataset in datasets]
- timestamps = [read_pos_timestamps(timestamps_file) for timestamps_file in pos_timestamps_files]
+ timestamps = [read_pos_timestamps(timestamps_file)
+ for timestamps_file in pos_timestamps_files]
continuous_time_extractor = ContinuousTimeExtractor()
- continuous_time_dicts = continuous_time_extractor.get_continuous_time_dict(continuous_time_files)
+ continuous_time_dicts = continuous_time_extractor.get_continuous_time_dict(
+ continuous_time_files)
distances = []
max_distance = 0
for i, continuous_time_dict in enumerate(continuous_time_dicts):
- converted_timestamps = TimestampConverter.convert_timestamps(continuous_time_dict, timestamps[i])
- for j in range(1, len(converted_timestamps) -1):
+ converted_timestamps = TimestampConverter.convert_timestamps(
+ continuous_time_dict, timestamps[i])
+ for j in range(1, len(converted_timestamps) - 1):
if converted_timestamps[j] > 0 and converted_timestamps[j - 1] > 0:
- new_dist = (converted_timestamps[j] - converted_timestamps[j - 1])
+ new_dist = (
+ converted_timestamps[j] - converted_timestamps[j - 1])
if new_dist > max_distance:
max_distance = new_dist
@@ -78,13 +87,16 @@ def extract_datasets(data_scanner, animal_name, date):
max_distance = 0
distances = []
- timestamps = [read_mda_timestamps(timestamps_file) for timestamps_file in mda_timestamps_files]
+ timestamps = [read_mda_timestamps(timestamps_file)
+ for timestamps_file in mda_timestamps_files]
for i, continuous_time_dict in enumerate(continuous_time_dicts):
- converted_timestamps = TimestampConverter.convert_timestamps(continuous_time_dict, timestamps[i])
- for j in range(1, len(converted_timestamps) -1):
+ converted_timestamps = TimestampConverter.convert_timestamps(
+ continuous_time_dict, timestamps[i])
+ for j in range(1, len(converted_timestamps) - 1):
if converted_timestamps[j] > 0 and converted_timestamps[j - 1] > 0:
- new_dist = (converted_timestamps[j] - converted_timestamps[j - 1])
+ new_dist = (
+ converted_timestamps[j] - converted_timestamps[j - 1])
if new_dist > max_distance:
max_distance = new_dist
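The loops above track the largest gap between consecutive converted timestamps, counting a pair only when both entries are positive (i.e. valid). A vectorized sketch of the same idea with numpy, assuming `converted_timestamps` is a 1-D float array as produced by `TimestampConverter`:

```python
import numpy as np

def max_gap(converted_timestamps):
    """Largest distance between consecutive valid (> 0) timestamps."""
    t = np.asarray(converted_timestamps, dtype=float)
    valid = (t[1:] > 0) & (t[:-1] > 0)   # both neighbours must be valid
    gaps = np.diff(t)[valid]             # distances between neighbouring pairs
    return gaps.max() if gaps.size else 0.0
```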
diff --git a/rec_to_nwb/environment.yml b/rec_to_nwb/test.yml
similarity index 87%
rename from rec_to_nwb/environment.yml
rename to rec_to_nwb/test.yml
index a52499d6e..ba97cb932 100644
--- a/rec_to_nwb/environment.yml
+++ b/rec_to_nwb/test.yml
@@ -1,10 +1,12 @@
-name: rec_to_nwb
+name: test
channels:
- conda-forge
- defaults
+ - franklab
- novelakrk
dependencies:
- python>=3.6,<3.8
+ - rec_to_binaries
- pip
- hdmf
- pynwb
@@ -46,10 +48,13 @@ dependencies:
- pytest
- testfixtures
- ndx-franklab-novela=0.0.011
+ - jupyterlab
+ - nb_conda
- pip:
- mountainlab_pytools
- xmldiff
- - rec-to-binaries==0.6.1.dev0
+ - hdmf>2.1.0
+ - pynwb
# Docs
- recommonmark
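Note that `hdmf>2.1.0` in the pip section is an exclusive lower bound: 2.1.0 itself is excluded, unlike `>=`. A quick check with the `packaging` library:

```python
# Exclusive vs. inclusive version bounds under PEP 440.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">2.1.0")
print(Version("2.1.0") in spec)  # False: the bound itself is excluded
print(Version("2.1.1") in spec)  # True
```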
diff --git a/rec_to_nwb/test/e2etests/test_nwbFullGeneration.py b/rec_to_nwb/test/e2etests/test_nwbFullGeneration.py
index 0a31cab5d..f84abb063 100644
--- a/rec_to_nwb/test/e2etests/test_nwbFullGeneration.py
+++ b/rec_to_nwb/test/e2etests/test_nwbFullGeneration.py
@@ -17,15 +17,15 @@ class TestNwbFullGeneration(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.metadata = MetadataManager(
- str(path) + '/processing/res/metadata.yml',
- [str(path) + '/processing/res/probe1.yml',
- str(path) + '/processing/res/probe2.yml',
- str(path) + '/processing/res/probe3.yml'])
+            str(path) + '/test_data/KF2/raw/20170120/kibbles20170216_metadata.yml',
+            [str(path) + '/test_data/KF2/raw/20170120/64c-3s6mm6cm-20um-40um-sl.yml',
+             str(path) + '/test_data/KF2/raw/20170120/64c-4s6mm6cm-20um-40um-dl.yml'
+ ])
cls.nwb_builder = NWBFileBuilder(
data_path=str(path) + '/test_data/',
- animal_name='beans',
- date='20190718',
+ animal_name='KF2',
+ date='20170120',
nwb_metadata=cls.metadata,
process_dio=True,
process_mda=True,
@@ -34,19 +34,19 @@ def setUpClass(cls):
video_path=str(path) + '/test_data'
)
- @unittest.skip("NWB file creation")
+ #@unittest.skip("NWB file creation")
def test_nwb_file_builder_generate_nwb(self):
content = self.nwb_builder.build()
self.nwb_builder.write(content)
self.nwb_builder.build_and_append_to_nwb(
- process_mda_valid_time=True,
- process_mda_invalid_time=True,
- process_pos_valid_time=True,
- process_pos_invalid_time=True
+ process_mda_valid_time=False,
+ process_mda_invalid_time=False,
+ process_pos_valid_time=False,
+ process_pos_invalid_time=False
)
self.assertIsNotNone(self.nwb_builder)
- @unittest.skip("read created NWB")
+ #@unittest.skip("read created NWB")
def test_nwb_file_builder_read_nwb(self):
with NWBHDF5IO(self.nwb_builder.output_file, 'r') as nwb_file:
content = nwb_file.read()
@@ -76,8 +76,8 @@ def test_nwb_file_builder_failed_due_to_None_parameter(self):
process_analog=True
)
- @classmethod
- def tearDownClass(cls):
- del cls.nwb_builder
- if os.path.isfile('output.nwb'):
- os.remove('output.nwb')
+ # @classmethod
+ # def tearDownClass(cls):
+ # del cls.nwb_builder
+ # if os.path.isfile('output.nwb'):
+ # os.remove('output.nwb')
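The test above follows the MetadataManager call pattern used throughout this suite: one session metadata YAML plus a list of probe definition YAMLs. A standalone sketch with illustrative relative paths:

```python
# MetadataManager call pattern from these tests; paths are illustrative.
from rec_to_nwb.processing.metadata.metadata_manager import MetadataManager

metadata = MetadataManager(
    'test_data/KF2/raw/20170120/kibbles20170216_metadata.yml',
    ['test_data/KF2/raw/20170120/64c-3s6mm6cm-20um-40um-sl.yml',
     'test_data/KF2/raw/20170120/64c-4s6mm6cm-20um-40um-dl.yml'])
```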
diff --git a/rec_to_nwb/test/e2etests/test_rawToNwbGeneration.py b/rec_to_nwb/test/e2etests/test_rawToNwbGeneration.py
index 9b6e6ee8f..bb4d1a1dc 100644
--- a/rec_to_nwb/test/e2etests/test_rawToNwbGeneration.py
+++ b/rec_to_nwb/test/e2etests/test_rawToNwbGeneration.py
@@ -8,25 +8,23 @@
path = os.path.dirname(os.path.abspath(__file__))
-_DEFAULT_TRODES_REC_EXPORT_ARGS = ('-reconfig', str(path) + '/../processing/res/reconfig_header.xml')
+_DEFAULT_TRODES_REC_EXPORT_ARGS = ('-reconfig', str(path) + '/../test_data/KF2/raw/20170120/kf2_reconfig.xml')
-@unittest.skip("Super heavy RAW to NWB Generation")
+#@unittest.skip("Super heavy RAW to NWB Generation")
class TestRawToNWBGeneration(unittest.TestCase):
def setUp(self):
self.metadata = MetadataManager(
- str(path) + '/../processing/res/metadata.yml',
+            str(path) + '/../test_data/KF2/raw/20170120/kibbles20170216_metadata.yml',
[
- str(path) + '/../processing/res/probe1.yml',
- str(path) + '/../processing/res/probe2.yml',
- str(path) + '/../processing/res/probe3.yml'
- ]
- )
+                str(path) + '/../test_data/KF2/raw/20170120/64c-3s6mm6cm-20um-40um-sl.yml',
+                str(path) + '/../test_data/KF2/raw/20170120/64c-4s6mm6cm-20um-40um-dl.yml'
+ ])
self.builder = RawToNWBBuilder(
- animal_name='beans',
+ animal_name='KF2',
data_path=str(path) + '/../test_data/',
- dates=['20190718'],
+ dates=['20170120'],
nwb_metadata=self.metadata,
output_path='',
video_path=str(path) + '/../test_data',
@@ -41,10 +39,10 @@ def setUp(self):
def test_from_raw_to_nwb_generation(self):
self.builder.build_nwb(
- process_mda_valid_time=True,
- process_mda_invalid_time=True,
- process_pos_valid_time=True,
- process_pos_invalid_time=True
+ process_mda_valid_time=False,
+ process_mda_invalid_time=False,
+ process_pos_valid_time=False,
+ process_pos_invalid_time=False
)
-        self.assertTrue(os.path.exists('beans20190718.nwb'), 'NWBFile did not build')
+        self.assertTrue(os.path.exists('KF220170120.nwb'), 'NWBFile did not build')
@@ -75,5 +73,5 @@ def test_raw_to_nwb_builder_failed_due_to_incorrect_type_parameters(self):
trodes_rec_export_args=_DEFAULT_TRODES_REC_EXPORT_ARGS
)
- def tearDown(self):
- self.builder.cleanup()
+ # def tearDown(self):
+ # self.builder.cleanup()
diff --git a/rec_to_nwb/test/processing/associated_files/test_associatedFilesCreator.py b/rec_to_nwb/test/processing/associated_files/test_associatedFilesCreator.py
index 1b47926df..fe011555c 100644
--- a/rec_to_nwb/test/processing/associated_files/test_associatedFilesCreator.py
+++ b/rec_to_nwb/test/processing/associated_files/test_associatedFilesCreator.py
@@ -1,7 +1,7 @@
import os
import unittest
-from ndx_franklab_novela.associated_files import AssociatedFiles
+from ndx_franklab_novela import AssociatedFiles
from rec_to_nwb.processing.nwb.components.associated_files.associated_files_creator import AssociatedFilesCreator
from rec_to_nwb.processing.nwb.components.associated_files.fl_associated_file import FlAssociatedFile
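This and the following test hunks switch to the extension's package-level exports (`from ndx_franklab_novela import AssociatedFiles` instead of the per-module path). If code has to tolerate both layouts, a hedged compatibility import looks like this:

```python
# Compatibility import for ndx_franklab_novela; newer releases re-export
# classes at package level, older ones used per-module paths.
try:
    from ndx_franklab_novela import AssociatedFiles
except ImportError:
    from ndx_franklab_novela.associated_files import AssociatedFiles
```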
diff --git a/rec_to_nwb/test/processing/device/probe/shank/test_flShankManager.py b/rec_to_nwb/test/processing/device/probe/shank/test_flShankManager.py
index f044674d0..9bf8819ad 100644
--- a/rec_to_nwb/test/processing/device/probe/shank/test_flShankManager.py
+++ b/rec_to_nwb/test/processing/device/probe/shank/test_flShankManager.py
@@ -2,7 +2,7 @@
from unittest.mock import Mock
from testfixtures import should_raise
-from ndx_franklab_novela.probe import ShanksElectrode
+from ndx_franklab_novela import ShanksElectrode
from rec_to_nwb.processing.exceptions.missing_data_exception import MissingDataException
from rec_to_nwb.processing.nwb.components.device.probe.shanks.fl_shank import FlShank
@@ -68,7 +68,7 @@ def test_fl_shank_manager_create_fl_shanks_dict_successfully(self):
self.assertEqual(len(fl_shanks_dict), 2)
self.assertEqual(len(fl_shanks_dict[probes_metadata_1['probe_type']]), 1)
self.assertEqual(len(fl_shanks_dict[probes_metadata_2['probe_type']]), 3)
-
+
self.assertEqual(fl_shanks_dict[probes_metadata_1['probe_type']][0].shank_id, 0)
self.assertEqual(fl_shanks_dict[probes_metadata_1['probe_type']][0].shanks_electrodes,[
mock_shanks_electrode_1, mock_shanks_electrode_2
@@ -176,4 +176,3 @@ def test_fl_shank_manager_failed_creating_fl_shanks_dict_due_to_None_param(self)
electrode_groups_metadata=Mock(spec=list)
)
fl_shank_manager.get_fl_shanks_dict(None)
-
diff --git a/rec_to_nwb/test/processing/device/probe/test_flProbeManager.py b/rec_to_nwb/test/processing/device/probe/test_flProbeManager.py
index 474b59c8f..d8ed06f47 100644
--- a/rec_to_nwb/test/processing/device/probe/test_flProbeManager.py
+++ b/rec_to_nwb/test/processing/device/probe/test_flProbeManager.py
@@ -3,7 +3,7 @@
from unittest.mock import Mock
from testfixtures import should_raise
-from ndx_franklab_novela.probe import Shank
+from ndx_franklab_novela import Shank
from rec_to_nwb.processing.nwb.components.device.probe.fl_probe import FlProbe
from rec_to_nwb.processing.nwb.components.device.probe.fl_probe_manager import FlProbeManager
diff --git a/rec_to_nwb/test/processing/device/test_deviceFactory.py b/rec_to_nwb/test/processing/device/test_deviceFactory.py
index d13646735..ad04eae48 100644
--- a/rec_to_nwb/test/processing/device/test_deviceFactory.py
+++ b/rec_to_nwb/test/processing/device/test_deviceFactory.py
@@ -1,9 +1,7 @@
from unittest import TestCase
from unittest.mock import Mock
-from ndx_franklab_novela.camera_device import CameraDevice
-from ndx_franklab_novela.header_device import HeaderDevice
-from ndx_franklab_novela.probe import Probe, Shank
+from ndx_franklab_novela import CameraDevice, HeaderDevice, Probe, Shank
from pynwb.device import Device
from testfixtures import should_raise
@@ -22,11 +20,11 @@ class TestDeviceFactory(TestCase):
def test_device_factory_create_Device_successfully(self):
mock_fl_device = Mock(spec=FlDevice)
mock_fl_device.name = 'Device1'
-
+
device = DeviceFactory.create_device(
fl_device=mock_fl_device
)
-
+
self.assertIsNotNone(device)
self.assertIsInstance(device, Device)
@@ -97,11 +95,11 @@ def test_device_factory_create_Probe_successfully(self):
mock_fl_probe.contact_side_numbering = True
mock_fl_probe.contact_size = 20.0
mock_fl_probe.shanks = [mock_shank_1, mock_shank_2]
-
+
probe = DeviceFactory.create_probe(
fl_probe=mock_fl_probe
)
-
+
self.assertIsNotNone(probe)
self.assertIsInstance(probe, Probe)
@@ -218,4 +216,4 @@ def test_device_factory_failed_creating_Probe_due_to_none_param_in_FlProbe(self)
DeviceFactory.create_header_device(
fl_header_device=mock_fl_header_device
- )
\ No newline at end of file
+ )
diff --git a/rec_to_nwb/test/processing/device/test_deviceInjector.py b/rec_to_nwb/test/processing/device/test_deviceInjector.py
index 8cead0693..2c84a1bc7 100644
--- a/rec_to_nwb/test/processing/device/test_deviceInjector.py
+++ b/rec_to_nwb/test/processing/device/test_deviceInjector.py
@@ -4,9 +4,7 @@
from testfixtures import should_raise
from dateutil.tz import tzlocal
-from ndx_franklab_novela.data_acq_device import DataAcqDevice
-from ndx_franklab_novela.header_device import HeaderDevice
-from ndx_franklab_novela.probe import Probe
+from ndx_franklab_novela import DataAcqDevice, HeaderDevice, Probe
from pynwb import NWBFile
from pynwb.device import Device
@@ -53,13 +51,13 @@ def setUpClass(cls):
cls.probe_2.id = 2
cls.probe_2.contact_size = 25.0
cls.probe_2.num_shanks = 30
-
+
cls.data_acq_device_0 = Mock(spec=DataAcqDevice)
cls.data_acq_device_0.name = 'Acq_0'
cls.data_acq_device_0.system = 'system_0'
cls.data_acq_device_0.amplifier = 'amplifier_0'
- cls.data_acq_device_0.adc_circuit = 'adc_circuit_0'
-
+ cls.data_acq_device_0.adc_circuit = 'adc_circuit_0'
+
cls.data_acq_device_1 = Mock(spec=DataAcqDevice)
cls.data_acq_device_1.name = 'Acq_1'
cls.data_acq_device_1.system = 'system_1'
@@ -73,7 +71,7 @@ def setUpClass(cls):
cls.device_2.name = 'Device_2'
cls.probes_dict = {'Probe_1': cls.probe_1, 'Probe_2': cls.probe_2}
-
+
cls.data_acq_device_dict = {'Acq_0': cls.data_acq_device_0, 'Acq_1': cls.data_acq_device_1}
cls.header_device_dict = {'HeaderDevice_1': cls.header_device_1}
@@ -124,8 +122,8 @@ def test_injector_inject_probes_to_nwb_successfully(self):
self.assertEqual(self.nwb_content.devices['Probe_2'].id, 2)
self.assertEqual(self.nwb_content.devices['Probe_2'].name, 'Probe_2')
self.assertEqual(self.nwb_content.devices['Probe_2'].num_shanks, 30)
- self.assertEqual(self.nwb_content.devices['Probe_2'].contact_size, 25.0)
-
+ self.assertEqual(self.nwb_content.devices['Probe_2'].contact_size, 25.0)
+
def test_injector_inject_data_acq_device_to_nwb_successfully(self):
self.device_injector.inject_all_devices(
nwb_content=self.nwb_content,
@@ -139,8 +137,8 @@ def test_injector_inject_data_acq_device_to_nwb_successfully(self):
self.assertEqual(self.nwb_content.devices['Acq_0'].name, 'Acq_0')
self.assertEqual(self.nwb_content.devices['Acq_0'].system, 'system_0')
self.assertEqual(self.nwb_content.devices['Acq_0'].amplifier, 'amplifier_0')
- self.assertEqual(self.nwb_content.devices['Acq_0'].adc_circuit, 'adc_circuit_0')
-
+ self.assertEqual(self.nwb_content.devices['Acq_0'].adc_circuit, 'adc_circuit_0')
+
self.assertIsInstance(self.nwb_content.devices['Acq_1'], DataAcqDevice)
self.assertEqual(self.nwb_content.devices['Acq_1'].name, 'Acq_1')
self.assertEqual(self.nwb_content.devices['Acq_1'].system, 'system_1')
diff --git a/rec_to_nwb/test/processing/device/test_header_device_manager.py b/rec_to_nwb/test/processing/device/test_header_device_manager.py
index fb340873a..1340f8e50 100644
--- a/rec_to_nwb/test/processing/device/test_header_device_manager.py
+++ b/rec_to_nwb/test/processing/device/test_header_device_manager.py
@@ -1,7 +1,7 @@
from unittest import TestCase
from unittest.mock import Mock
-from ndx_franklab_novela.header_device import HeaderDevice
+from ndx_franklab_novela import HeaderDevice
from rec_to_nwb.processing.header.module.global_configuration import GlobalConfiguration
from rec_to_nwb.processing.nwb.components.device.device_factory import DeviceFactory
@@ -136,4 +136,3 @@ def test_header_device_manager_create_HeaderDevice_with_default_values_successfu
self.assertEqual(header_device.commit_head, 'Sample commit_head')
self.assertEqual(header_device.system_time_at_creation, 'Sample system_time_at_creation')
self.assertEqual(header_device.file_path, 'Sample file_path')
-
diff --git a/rec_to_nwb/test/processing/dio/test_dioInjector.py b/rec_to_nwb/test/processing/dio/test_dioInjector.py
index 77f4020aa..b3a771cbe 100644
--- a/rec_to_nwb/test/processing/dio/test_dioInjector.py
+++ b/rec_to_nwb/test/processing/dio/test_dioInjector.py
@@ -18,13 +18,13 @@ class TestDioManager(unittest.TestCase):
def setUp(self):
self.nwb_content = NWBFile(
- session_description='session description',
- experimenter='experimenter name',
+ session_description='session_description',
+ experimenter='experimenter_name',
lab='lab',
institution='institution',
session_start_time=start_time,
identifier='identifier',
- experiment_description='experiment description')
+ experiment_description='experiment_description')
processing_module = ProcessingModule(name='test_processing_module_name', description='test_description')
self.nwb_content.add_processing_module(processing_module)
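The fixture above builds an in-memory `NWBFile` from placeholder strings. A minimal standalone version of that construction, substituting a concrete `session_start_time` for the module-level `start_time` the test uses:

```python
# Minimal NWBFile construction mirroring the test fixture above.
from datetime import datetime
from dateutil.tz import tzlocal
from pynwb import NWBFile

nwb_content = NWBFile(
    session_description='session_description',
    experimenter='experimenter_name',
    lab='lab',
    institution='institution',
    session_start_time=datetime.now(tzlocal()),
    identifier='identifier',
    experiment_description='experiment_description')
```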
diff --git a/rec_to_nwb/test/processing/electrode_group/test_electrodeGroupFactory.py b/rec_to_nwb/test/processing/electrode_group/test_electrodeGroupFactory.py
index b169e8e1e..c9969512a 100644
--- a/rec_to_nwb/test/processing/electrode_group/test_electrodeGroupFactory.py
+++ b/rec_to_nwb/test/processing/electrode_group/test_electrodeGroupFactory.py
@@ -1,7 +1,7 @@
from unittest import TestCase
from unittest.mock import Mock
-from ndx_franklab_novela.probe import Probe
+from ndx_franklab_novela import Probe
from pynwb.device import Device
from testfixtures import should_raise
@@ -31,7 +31,7 @@ def test_electrode_group_factory_create_ElectrodeGroup_successfully(self):
electrode_group_1 = ElectrodeGroupFactory.create_electrode_group(mock_fl_electrode_group_1)
electrode_group_2 = ElectrodeGroupFactory.create_electrode_group(mock_fl_electrode_group_2)
-
+
self.assertIsNotNone(electrode_group_1)
self.assertIsNotNone(electrode_group_2)
self.assertEqual(electrode_group_1.name, "0")
@@ -125,4 +125,4 @@ def test_electrode_group_factory_failed_creating_NwbElectrodeGroup_due_to_lack_o
mock_fl_nwb_electrode_group_1.targeted_z = None
mock_fl_nwb_electrode_group_1.units = None
- ElectrodeGroupFactory.create_nwb_electrode_group(mock_fl_nwb_electrode_group_1)
\ No newline at end of file
+ ElectrodeGroupFactory.create_nwb_electrode_group(mock_fl_nwb_electrode_group_1)
diff --git a/rec_to_nwb/test/processing/electrode_group/test_flElectrodeGroupManager.py b/rec_to_nwb/test/processing/electrode_group/test_flElectrodeGroupManager.py
index 7fc7d6335..6257dc03b 100644
--- a/rec_to_nwb/test/processing/electrode_group/test_flElectrodeGroupManager.py
+++ b/rec_to_nwb/test/processing/electrode_group/test_flElectrodeGroupManager.py
@@ -3,7 +3,7 @@
from unittest.mock import Mock
from testfixtures import should_raise
-from ndx_franklab_novela.probe import Probe
+from ndx_franklab_novela import Probe
from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group_manager import FlElectrodeGroupManager
from rec_to_nwb.processing.nwb.components.electrode_group.fl_electrode_group import FlElectrodeGroup
diff --git a/rec_to_nwb/test/processing/electrode_group/test_flNwbElectrodeGroupManager.py b/rec_to_nwb/test/processing/electrode_group/test_flNwbElectrodeGroupManager.py
index ab7adddd0..51979b14d 100644
--- a/rec_to_nwb/test/processing/electrode_group/test_flNwbElectrodeGroupManager.py
+++ b/rec_to_nwb/test/processing/electrode_group/test_flNwbElectrodeGroupManager.py
@@ -3,7 +3,7 @@
from unittest.mock import Mock
from testfixtures import should_raise
-from ndx_franklab_novela.probe import Probe
+from ndx_franklab_novela import Probe
from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group import FlNwbElectrodeGroup
from rec_to_nwb.processing.nwb.components.electrode_group.fl_nwb_electrode_group_manager import FlNwbElectrodeGroupManager
diff --git a/rec_to_nwb/test/processing/electrodes/extension/test_flElectrodeExtensionManager.py b/rec_to_nwb/test/processing/electrodes/extension/test_flElectrodeExtensionManager.py
index 3e736347d..db14c6606 100644
--- a/rec_to_nwb/test/processing/electrodes/extension/test_flElectrodeExtensionManager.py
+++ b/rec_to_nwb/test/processing/electrodes/extension/test_flElectrodeExtensionManager.py
@@ -2,13 +2,14 @@
from unittest import TestCase
from unittest.mock import Mock
-from testfixtures import should_raise
-
-from rec_to_nwb.processing.exceptions.not_compatible_metadata import NotCompatibleMetadata
+from rec_to_nwb.processing.exceptions.not_compatible_metadata import \
+ NotCompatibleMetadata
from rec_to_nwb.processing.header.module.header import Header
-from rec_to_nwb.processing.nwb.components.electrodes.extension.fl_electrode_extension import FlElectrodeExtension
+from rec_to_nwb.processing.nwb.components.electrodes.extension.fl_electrode_extension import \
+ FlElectrodeExtension
from rec_to_nwb.processing.nwb.components.electrodes.extension.fl_electrode_extension_manager import \
FlElectrodeExtensionManager
+from testfixtures import should_raise
path = os.path.dirname(os.path.abspath(__file__))
@@ -45,39 +46,54 @@ def test_electrode_extension_manager_get_fl_electrode_extension_successfully(sel
]}
]
metadata = {
- 'electrode groups': [
- {'id': '0', 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
+ 'electrode_groups': [
+ {'id': '0', 'location': 'mPFC',
+ 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
{'id': '1', 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl', 'description': 'Probe 2'}],
- 'ntrode electrode group channel map': [
- {'ntrode_id': '1', 'probe_id': '0', 'bad_channels': ['0', '2'], 'map': {'0': '0', '1': '1', '2': '2'}},
- {'ntrode_id': '2', 'probe_id': '0', 'bad_channels': ['0'], 'map': {'0': '32', '1': '33', '2': '34'}},
- {'ntrode_id': '3', 'probe_id': '1', 'bad_channels': ['0', '1'], 'map': {'0': '64', '1': '65', '2': '66'}},
- {'ntrode_id': '4', 'probe_id': '1', 'bad_channels': ['0', '2'], 'map': {'0': '96', '1': '97', '2': '98'}}
+ 'ntrode_electrode_group_channel_map': [
+ {'ntrode_id': '1', 'probe_id': '0', 'bad_channels': [
+ '0', '2'], 'map': {'0': '0', '1': '1', '2': '2'}},
+ {'ntrode_id': '2', 'probe_id': '0', 'bad_channels': [
+ '0'], 'map': {'0': '32', '1': '33', '2': '34'}},
+ {'ntrode_id': '3', 'probe_id': '1', 'bad_channels': [
+ '0', '1'], 'map': {'0': '64', '1': '65', '2': '66'}},
+ {'ntrode_id': '4', 'probe_id': '1', 'bad_channels': [
+ '0', '2'], 'map': {'0': '96', '1': '97', '2': '98'}}
]
}
- header = Header(str(path) + '/../../res/electrodes_extensions/header.xml')
- mock_electrodes_valid_map = [False, True, False, False, True, True, False, False, True, False, True, False]
+ header = Header(
+ str(path) + '/../../res/electrodes_extensions/header.xml')
+ mock_electrodes_valid_map = [
+ False, True, False, False, True, True, False, False, True, False, True, False]
fl_electrode_extension_manager = FlElectrodeExtensionManager(
probes_metadata=probes_metadata,
metadata=metadata,
header=header,
)
- fl_electrode_extension = fl_electrode_extension_manager.get_fl_electrodes_extension(mock_electrodes_valid_map)
+ fl_electrode_extension = fl_electrode_extension_manager.get_fl_electrodes_extension(
+ mock_electrodes_valid_map)
self.assertIsInstance(fl_electrode_extension, FlElectrodeExtension)
- self.assertEqual(fl_electrode_extension.rel_x, [0.0, 0.0, 40.0, 0.0, 0.0])
- self.assertEqual(fl_electrode_extension.rel_y, [0.0, 0.0, 0.0, 600.0, 900.0])
- self.assertEqual(fl_electrode_extension.rel_z, [0.0, 0.0, 0.0, 0.0, 0.0])
+ self.assertEqual(fl_electrode_extension.rel_x,
+ [0.0, 0.0, 40.0, 0.0, 0.0])
+ self.assertEqual(fl_electrode_extension.rel_y,
+ [0.0, 0.0, 0.0, 600.0, 900.0])
+ self.assertEqual(fl_electrode_extension.rel_z,
+ [0.0, 0.0, 0.0, 0.0, 0.0])
self.assertEqual(fl_electrode_extension.hw_chan[0], 85)
self.assertEqual(fl_electrode_extension.hw_chan[-1], 102)
self.assertEqual(fl_electrode_extension.ntrode_id, [1, 2, 2, 3, 4])
self.assertEqual(fl_electrode_extension.channel_id, [1, 1, 2, 2, 1])
- self.assertEqual(fl_electrode_extension.probe_shank, ['0', '0', '0', '2', '3'])
- self.assertEqual(fl_electrode_extension.bad_channels, [False, False, False, False, False])
- self.assertEqual(fl_electrode_extension.probe_electrode, ['1', '0', '1', '64', '96'])
- self.assertEqual(fl_electrode_extension.ref_elect_id, [-1, 2, 2, 34, 34])
+ self.assertEqual(fl_electrode_extension.probe_shank,
+ ['0', '0', '0', '2', '3'])
+ self.assertEqual(fl_electrode_extension.bad_channels,
+ [False, False, False, False, False])
+ self.assertEqual(fl_electrode_extension.probe_electrode, [
+ '1', '0', '1', '64', '96'])
+ self.assertEqual(fl_electrode_extension.ref_elect_id,
+ [-1, 2, 2, 34, 34])
@should_raise(NotCompatibleMetadata)
def test_electrode_extension_manager_failed_due_to_not_equal_extensions_length(self):
@@ -110,18 +126,24 @@ def test_electrode_extension_manager_failed_due_to_not_equal_extensions_length(s
]}
]
metadata = {
- 'electrode groups': [
- {'id': '0', 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
+ 'electrode_groups': [
+ {'id': '0', 'location': 'mPFC',
+ 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
{'id': '1', 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl', 'description': 'Probe 2'}],
- 'ntrode electrode group channel map': [
- {'ntrode_id': '1', 'probe_id': '0', 'bad_channels': ['0', '2'], 'map': {'0': '0', '1': '1', '2': '2', '3': '3', '4': '4'}},
- {'ntrode_id': '2', 'probe_id': '0', 'bad_channels': ['0', '3'], 'map': {'0': '32', '1': '33', '2': '34', '3': '35', '4': '36'}},
- {'ntrode_id': '3', 'probe_id': '1', 'bad_channels': ['0', '1'], 'map': {'0': '64', '1': '65', '2': '66', '3': '67', '4': '68'}},
- {'ntrode_id': '4', 'probe_id': '1', 'bad_channels': ['0', '2', '3'], 'map': {'0': '96', '1': '97', '2': '98', '3': '99', '4': '100'}}
+ 'ntrode_electrode_group_channel_map': [
+ {'ntrode_id': '1', 'probe_id': '0', 'bad_channels': ['0', '2'], 'map': {
+ '0': '0', '1': '1', '2': '2', '3': '3', '4': '4'}},
+ {'ntrode_id': '2', 'probe_id': '0', 'bad_channels': ['0', '3'], 'map': {
+ '0': '32', '1': '33', '2': '34', '3': '35', '4': '36'}},
+ {'ntrode_id': '3', 'probe_id': '1', 'bad_channels': ['0', '1'], 'map': {
+ '0': '64', '1': '65', '2': '66', '3': '67', '4': '68'}},
+ {'ntrode_id': '4', 'probe_id': '1', 'bad_channels': ['0', '2', '3'], 'map': {
+ '0': '96', '1': '97', '2': '98', '3': '99', '4': '100'}}
]
}
- header = Header(str(path) + '/../../res/nwb_elements_builder_test/header.xml')
+ header = Header(
+ str(path) + '/../../res/nwb_elements_builder_test/header.xml')
mock_electrodes_valid_map = [
True, False, True, False,
False, True, False, False,
@@ -135,7 +157,8 @@ def test_electrode_extension_manager_failed_due_to_not_equal_extensions_length(s
metadata=metadata,
header=header,
)
- fl_electrode_extension_manager.get_fl_electrodes_extension(mock_electrodes_valid_map)
+ fl_electrode_extension_manager.get_fl_electrodes_extension(
+ mock_electrodes_valid_map)
@should_raise(TypeError)
def test_electrode_extension_manager_failed_due_to_None_param(self):
diff --git a/rec_to_nwb/test/processing/metadata/test_corruptedDataManager.py b/rec_to_nwb/test/processing/metadata/test_corruptedDataManager.py
index 1fa5995f8..36a5d37c4 100644
--- a/rec_to_nwb/test/processing/metadata/test_corruptedDataManager.py
+++ b/rec_to_nwb/test/processing/metadata/test_corruptedDataManager.py
@@ -10,7 +10,7 @@ class TestCorruptedDataManager(TestCase):
def test_corrupted_data_manager_get_valid_map_dict_successfully(self):
metadata = {
- 'ntrode electrode group channel map': [
+ 'ntrode_electrode_group_channel_map': [
{'ntrode_id': 1, 'electrode_group_id': 0, 'bad_channels': [1],
'map': {0: 0, 1: 1}},
{'ntrode_id': 2, 'electrode_group_id': 0, 'bad_channels': [1],
@@ -28,7 +28,7 @@ def test_corrupted_data_manager_get_valid_map_dict_successfully(self):
{'ntrode_id': 8, 'electrode_group_id': 3, 'bad_channels': [0, 1],
'map': {0: 14, 1: 15}}
],
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5',
'description': 'Probe 1'},
{'id': 1, 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl',
@@ -79,11 +79,11 @@ def test_corrupted_data_manager_get_valid_map_dict_failed_due_to_bad_type_param(
@should_raise(CorruptedDataException)
def test_corrupted_data_manager_get_valid_map_dict_end_nbw_building_process_due_to_lack_of_good_data(self):
metadata = {
- 'ntrode electrode group channel map': [
+ 'ntrode_electrode_group_channel_map': [
{'ntrode_id': 1, 'electrode_group_id': 0, 'bad_channels': [0, 1], 'map': {0: 0, 1: 1}},
{'ntrode_id': 2, 'electrode_group_id': 1, 'bad_channels': [0, 1], 'map': {0: 2, 1: 3}},
],
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5',
'description': 'Probe 1'},
{'id': 1, 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl',
diff --git a/rec_to_nwb/test/processing/metadata/test_metadataManager.py b/rec_to_nwb/test/processing/metadata/test_metadataManager.py
index 0b876dc70..cb45a77b0 100644
--- a/rec_to_nwb/test/processing/metadata/test_metadataManager.py
+++ b/rec_to_nwb/test/processing/metadata/test_metadataManager.py
@@ -21,17 +21,17 @@ def test_metadata_manager_reading_metadata_successfully(self):
)
metadata_fields = nwb_metadata.metadata.keys()
- self.assertIn('experimenter name', metadata_fields)
+ self.assertIn('experimenter_name', metadata_fields)
self.assertIn('lab', metadata_fields)
self.assertIn('institution', metadata_fields)
self.assertIn('session_id', metadata_fields)
- self.assertIn('experiment description', metadata_fields)
- self.assertIn('session description', metadata_fields)
+ self.assertIn('experiment_description', metadata_fields)
+ self.assertIn('session_description', metadata_fields)
self.assertIn('subject', metadata_fields)
self.assertIn('tasks', metadata_fields)
self.assertIn('behavioral_events', metadata_fields)
- self.assertIn('electrode groups', metadata_fields)
- self.assertIn('ntrode electrode group channel map', metadata_fields)
+ self.assertIn('electrode_groups', metadata_fields)
+ self.assertIn('ntrode_electrode_group_channel_map', metadata_fields)
self.assertIn('units', metadata_fields)
self.assertIn('unspecified', nwb_metadata.metadata['units']['analog'])
@@ -42,7 +42,7 @@ def test_metadata_manager_reading_metadata_successfully(self):
self.assertIn('genotype', subject_fields)
self.assertIn('sex', subject_fields)
self.assertIn('species', subject_fields)
- self.assertIn('subject id', subject_fields)
+ self.assertIn('subject_id', subject_fields)
self.assertIn('weight', subject_fields)
tasks_fields = nwb_metadata.metadata['tasks'][0].keys()
@@ -53,13 +53,13 @@ def test_metadata_manager_reading_metadata_successfully(self):
self.assertIn('description', behavioral_event_fields)
self.assertIn('name', behavioral_event_fields)
- electrode_groups_fields = nwb_metadata.metadata['electrode groups'][0].keys()
+ electrode_groups_fields = nwb_metadata.metadata['electrode_groups'][0].keys()
self.assertIn('id', electrode_groups_fields)
self.assertIn('location', electrode_groups_fields)
self.assertIn('device_type', electrode_groups_fields)
self.assertIn('description', electrode_groups_fields)
- ntrode_probe_channel_map_fields = nwb_metadata.metadata['ntrode electrode group channel map'][0].keys()
+ ntrode_probe_channel_map_fields = nwb_metadata.metadata['ntrode_electrode_group_channel_map'][0].keys()
self.assertIn('map', ntrode_probe_channel_map_fields)
self.assertIn('electrode_group_id', ntrode_probe_channel_map_fields)
self.assertIn('ntrode_id', ntrode_probe_channel_map_fields)
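These test updates reflect a metadata schema change from spaced keys to snake_case. A hypothetical one-off helper for migrating old metadata dicts; the key list mirrors the renames visible in these hunks, and only top-level keys are handled (`'subject id'` lives one level down and needs the same treatment):

```python
# Hypothetical migration helper for pre-rename metadata dicts.
_RENAMES = {
    'experimenter name': 'experimenter_name',
    'experiment description': 'experiment_description',
    'session description': 'session_description',
    'data acq device': 'data_acq_device',
    'electrode groups': 'electrode_groups',
    'ntrode electrode group channel map': 'ntrode_electrode_group_channel_map',
}

def migrate_metadata_keys(metadata):
    """Return a copy of `metadata` with legacy spaced keys renamed."""
    return {_RENAMES.get(key, key): value for key, value in metadata.items()}
```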
diff --git a/rec_to_nwb/test/processing/res/jaq_metadata.yml b/rec_to_nwb/test/processing/res/jaq_metadata.yml
index db09554c6..f94a32fa1 100644
--- a/rec_to_nwb/test/processing/res/jaq_metadata.yml
+++ b/rec_to_nwb/test/processing/res/jaq_metadata.yml
@@ -1,15 +1,15 @@
-experimenter name: Alison Comrie
+experimenter_name: Alison Comrie
lab: Loren Frank
institution: University of California, San Francisco
-experiment description: Reinforcement learning
-session description: Reinforcement leaarning
+experiment_description: Reinforcement learning
+session_description: Reinforcement learning
session_id: jaq_01
subject:
description: Long Evans Rat
genotype: Wild Type
sex: Male
species: Rat
- subject id: Beans
+ subject_id: Beans
weight: Unknown
tasks:
@@ -42,7 +42,7 @@ device:
name:
- Trodes
-electrode groups:
+electrode_groups:
- id: 0
location: mPFC
device_type: 128c-4s8mm6cm-20um-40um-sl
diff --git a/rec_to_nwb/test/processing/res/metadata.yml b/rec_to_nwb/test/processing/res/metadata.yml
index fc6e1e0f7..01a8fd013 100644
--- a/rec_to_nwb/test/processing/res/metadata.yml
+++ b/rec_to_nwb/test/processing/res/metadata.yml
@@ -1,22 +1,22 @@
-experimenter name: Alison Comrie
+experimenter_name: Alison Comrie
lab: Loren Frank
institution: University of California, San Francisco
-experiment description: Reinforcement learning
-session description: Reinforcement leaarning
+experiment_description: Reinforcement learning
+session_description: Reinforcement learning
session_id: beans_01
subject:
description: Long Evans Rat
genotype: Wild Type
sex: M
species: Ratticus norvegicus
- subject id: Beans
+ subject_id: Beans
weight: Unknown
units:
analog: unspecified
behavioral_events: unspecified
-data acq device:
+data_acq_device:
- system: sample_system
amplifier: sample_amplifier
adc_circuit: sample_adc_circuit
@@ -112,7 +112,7 @@ behavioral_events:
name: Pump6
-electrode groups:
+electrode_groups:
- id: 0
location: mPFC
device_type: 128c-4s8mm6cm-20um-40um-sl
@@ -133,7 +133,7 @@ electrode groups:
units: 'um'
-ntrode electrode group channel map:
+ntrode_electrode_group_channel_map:
- ntrode_id: 1
electrode_group_id: 0
bad_channels: [0,2]
diff --git a/rec_to_nwb/test/processing/test_dataIterator1Dimension.py b/rec_to_nwb/test/processing/test_dataIterator1Dimension.py
index bd85a8149..b6361b1ab 100644
--- a/rec_to_nwb/test/processing/test_dataIterator1Dimension.py
+++ b/rec_to_nwb/test/processing/test_dataIterator1Dimension.py
@@ -15,7 +15,7 @@ def test_data_iterator(self):
class FakeTimestampDataManager:
def __init__(self):
self.number_of_datasets = 2
- self.file_lenghts_in_datasets = [5, 6]
+ self.file_lengths_in_datasets = [5, 6]
self.fake_timestamps = [np.ndarray(dtype="float64", shape=[5, ]), np.ndarray(dtype="float64", shape=[6, ])]
for i in range(2):
for j in range(len(self.fake_timestamps[i])):
@@ -30,5 +30,5 @@ def get_final_data_shape(self):
def get_number_of_datasets(self):
return self.number_of_datasets
- def get_file_lenghts_in_datasets(self):
- return self.file_lenghts_in_datasets
+ def get_file_lengths_in_datasets(self):
+ return self.file_lengths_in_datasets
diff --git a/rec_to_nwb/test/processing/test_dataIterator2Dimension.py b/rec_to_nwb/test/processing/test_dataIterator2Dimension.py
index 6967530cc..9a7e38061 100644
--- a/rec_to_nwb/test/processing/test_dataIterator2Dimension.py
+++ b/rec_to_nwb/test/processing/test_dataIterator2Dimension.py
@@ -15,7 +15,7 @@ def test_data_iterator(self):
class FakeDataManager:
def __init__(self):
self.number_of_datasets = 2
- self.file_lenghts_in_datasets = [5, 6]
+ self.file_lengths_in_datasets = [5, 6]
self.number_of_files_per_dataset = 2
self.number_of_rows_per_file = 4
self.fake_timestamps = [np.ndarray(dtype="float64", shape=[4, 5]), np.ndarray(dtype="float64", shape=[4, 6])]
@@ -33,8 +33,8 @@ def get_final_data_shape(self):
def get_number_of_datasets(self):
return self.number_of_datasets
- def get_file_lenghts_in_datasets(self):
- return self.file_lenghts_in_datasets
+ def get_file_lengths_in_datasets(self):
+ return self.file_lengths_in_datasets
def get_number_of_files_per_dataset(self):
return self.number_of_files_per_dataset
diff --git a/rec_to_nwb/test/processing/validators/test_nTrodeValidator.py b/rec_to_nwb/test/processing/validators/test_nTrodeValidator.py
index f24b3d59a..075f9e53c 100644
--- a/rec_to_nwb/test/processing/validators/test_nTrodeValidator.py
+++ b/rec_to_nwb/test/processing/validators/test_nTrodeValidator.py
@@ -43,11 +43,11 @@ def test_ntrode_validator_validate_correct_data_successfully(self):
]
metadata = {
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
{'id': 1, 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl', 'description': 'Probe 2'},
],
- "ntrode electrode group channel map": [
+ "ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
{"ntrode_id": 2, "electrode_group_id": 1, "bad_channels": [0, 1], "map": {0: 4, 1: 5, 2: 6, 3: 7}},
]
@@ -83,10 +83,10 @@ def test_ntrode_validator_validate_ndtrodes_num_less_than_spikes_successfully(se
]
metadata = {
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
],
- "ntrode electrode group channel map": [
+ "ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
]
}
@@ -125,11 +125,11 @@ def test_ntrode_validator_validate_ndtrodes_num_greater_than_spikes_successfully
]
metadata = {
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
{'id': 1, 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl', 'description': 'Probe 2'},
],
- "ntrode electrode group channel map": [
+ "ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
{"ntrode_id": 2, "electrode_group_id": 1, "bad_channels": [0, 1], "map": {0: 4, 1: 5, 2: 6, 3: 7}},
{"ntrode_id": 3, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 8, 1: 9, 2: 10, 3: 11}},
@@ -144,7 +144,7 @@ def test_ntrode_validator_validate_ndtrodes_num_greater_than_spikes_successfully
@should_raise(TypeError)
def test_ntrode_validator_raise_exception_due_to_empty_param(self):
- metadata = {"ntrode electrode group channel map": [
+ metadata = {"ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
{"ntrode_id": 2, "electrode_group_id": 0, "bad_channels": [0, 1], "map": {0: 4, 1: 5, 2: 6, 3: 7}},
{"ntrode_id": 3, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 8, 1: 9, 2: 10, 3: 11}},
@@ -154,7 +154,7 @@ def test_ntrode_validator_raise_exception_due_to_empty_param(self):
@should_raise(InvalidHeaderException)
def test_ntrode_validator_raise_exception_due_to_header_without_spike_ntrodes(self):
- metadata = {"ntrode electrode group channel map": [
+ metadata = {"ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
{"ntrode_id": 2, "electrode_group_id": 0, "bad_channels": [0, 1], "map": {0: 4, 1: 5, 2: 6, 3: 7}},
{"ntrode_id": 3, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 8, 1: 9, 2: 10, 3: 11}},
@@ -166,7 +166,7 @@ def test_ntrode_validator_raise_exception_due_to_header_without_spike_ntrodes(se
@should_raise(InvalidMetadataException)
def test_should_raise_exception_due_to_metadata_without_ntrodes(self):
- metadata = {"ntrode electrode group channel map": []}
+ metadata = {"ntrode_electrode_group_channel_map": []}
validator = NTrodeValidator(metadata, self.header, [])
validator.create_summary()
@@ -197,11 +197,11 @@ def test_should_raise_exception_due_to_incompatible_probes_metadata_with_ntrodes
]
metadata = {
- 'electrode groups': [
+ 'electrode_groups': [
{'id': 0, 'location': 'mPFC', 'device_type': 'tetrode_12.5', 'description': 'Probe 1'},
{'id': 1, 'location': 'mPFC', 'device_type': '128c-4s8mm6cm-20um-40um-sl', 'description': 'Probe 2'},
],
- "ntrode electrode group channel map": [
+ "ntrode_electrode_group_channel_map": [
{"ntrode_id": 1, "electrode_group_id": 0, "bad_channels": [0, 2], "map": {0: 0, 1: 1, 2: 2, 3: 3}},
{"ntrode_id": 2, "electrode_group_id": 1, "bad_channels": [0, 1], "map": {0: 4, 1: 5, 2: 6, 3: 7}},
]
diff --git a/setup.py b/setup.py
index 0d1d6850f..238300f16 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,7 @@
-version = '0.1.019'
-print(version)
-
-from setuptools import setup, find_packages
+from setuptools import find_packages, setup
+version = '0.1.020'
+print(version)
setup(
name='rec_to_nwb',
@@ -10,8 +9,12 @@
author='Novela Neurotech',
url="https://github.com/NovelaNeuro/rec_to_nwb",
packages=find_packages(),
- package_data={'': ['logging.conf', 'data/fl_lab_header.xsd', 'data/header_schema.xsd', 'data/default_header.xml']},
+ package_data={'': ['logging.conf', 'data/fl_lab_header.xsd',
+ 'data/header_schema.xsd', 'data/default_header.xml']},
description='Data transformation from rec binary files into NWB 2.0 format',
platforms='Posix; MacOS X; Windows',
- python_requires='>=3.6'
+ python_requires='>=3.6',
+ install_requires=[
+ 'rec_to_binaries>=0.6.12'
+ ]
)
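With this change the exact pin `rec-to-binaries==0.6.1.dev0` in the environment file gives way to a floor of `rec_to_binaries>=0.6.12` in `install_requires`. Under PEP 440, version segments compare numerically (so `0.6.12` is newer than `0.6.2`) and `.dev` pre-releases sort before their final release; a quick check with the `packaging` library:

```python
# PEP 440 ordering relevant to the dependency change above.
from packaging.version import Version

print(Version("0.6.1.dev0") < Version("0.6.1"))  # True: dev pre-releases sort first
print(Version("0.6.2") < Version("0.6.12"))      # True: numeric, not lexicographic
```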