From ae671ec52639ba1d7fdea4040d06f4360bab31d2 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 11:50:04 -0500
Subject: [PATCH 01/11] attempt at docker test

---
 .github/workflows/docker_test.yml | 44 +++++++++++++++++++++++++++++++
 1 file changed, 44 insertions(+)
 create mode 100644 .github/workflows/docker_test.yml

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
new file mode 100644
index 0000000..ea7b248
--- /dev/null
+++ b/.github/workflows/docker_test.yml
@@ -0,0 +1,44 @@
+name: build_docker
+
+on:
+  push:
+    branches:
+      - main
+      - dev
+  #pull_request:
+  #  branches:
+  #    - main
+  #    - dev
+  workflow_call:
+    secrets:
+      DOCKERENV:
+        required: true
+        description: 'Access to AWS RDS server'
+
+jobs:
+  build_docker:
+    runs-on: ubuntu-20.04
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Set up Docker Compose
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y docker-compose
+
+      - name: Build and run Docker Compose
+        run: |
+          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml up --build -d
+          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml up --build -d
+          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml up --build -d
+
+      # Add additional steps here for testing or other actions
+
+      - name: Clean up Docker Compose
+        run: |
+          docker-compose -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml down
+          docker-compose -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml down
+          docker-compose -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml down
+

From 9476c9fb2c5a8b94e6430211e50cb005b709078a Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 11:52:00 -0500
Subject: [PATCH 02/11] attempt2 at docker_test

---
 .github/workflows/docker_test.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index ea7b248..d25641f 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -30,9 +30,9 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml up --build -d
-          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml up --build -d
-          docker-compose -f --env-file=${{secrets.DOCKERENV}} ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml up --build -d

       # Add additional steps here for testing or other actions

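Note on the two patches above: `docker-compose -f` takes the compose file path as its next argument, so in patch 01 the `--env-file=...` token was consumed as the `-f` value; patch 02 reorders the options so each flag keeps its own value. Separately, `--env-file` expects a path to an env file on disk, so if the DOCKERENV secret holds the contents of an env file rather than a path, it would have to be written out first. A minimal sketch of that shape, using the checkout-relative path the later patches converge on (the `.env` handling is an assumption, not something these patches do):

    - name: Build standard worker (sketch)
      run: |
        # Assumption: DOCKERENV contains the body of an env file, so write it to
        # disk first, because --env-file wants a file path rather than a value.
        echo "${{ secrets.DOCKERENV }}" > .env
        docker-compose --env-file .env \
          -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yml \
          up --build -d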
From 59ababf614f419dda6ce61021d7ba1f5edc45fd7 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 13:22:41 -0500
Subject: [PATCH 03/11] attempt #3

---
 .github/workflows/docker_test.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index d25641f..1832d27 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -30,9 +30,9 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml up --build -d
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml up --build -d
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p up --build -d

       # Add additional steps here for testing or other actions


From ef9eae074ebd5fc25c369ae01134abc9c36718d4 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 13:30:52 -0500
Subject: [PATCH 04/11] attempt #4

---
 .github/workflows/docker_test.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index 1832d27..e3abc91 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -30,9 +30,9 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p up --build -d
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p up --build -d
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p up --build -d
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache

       # Add additional steps here for testing or other actions

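Note on patches 03-04: `-p`/`--project-name` also requires a value, so `-p up` in patch 03 makes Compose treat `up` as the project name, leaving `--build` to be parsed as the command. Patch 04 supplies explicit project names and switches to `build --no-cache`. A sketch of the corrected form, assuming the hypothetical `.env` file from the earlier note:

    - name: Build calcium imaging worker (sketch)
      run: |
        docker-compose --env-file .env \
          -f ./docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml \
          -p sabatini-datajoint-pipeline_calcium_imaging_worker \
          build --no-cache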
From 0dc61cf845028c3140aed22d43049872c7edcaa9 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 13:38:30 -0500
Subject: [PATCH 05/11] attempt #5

---
 .github/workflows/docker_test.yml | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index e3abc91..22a97a1 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -30,15 +30,16 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
+          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
+          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache

       # Add additional steps here for testing or other actions

       - name: Clean up Docker Compose
         run: |
-          docker-compose -f ../docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yml down
-          docker-compose -f ../docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yml down
-          docker-compose -f ../docker/standard_worker/dist/debian/docker-compose-standard_worker.yml down
+          docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build down
+          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker down
+          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build down
+

From 3951e07c12b674cafad29ccd7d5a852ecc5337f2 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Fri, 19 Jan 2024 13:48:21 -0500
Subject: [PATCH 06/11] attempt #6

---
 .github/workflows/docker_test.yml | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index 22a97a1..c453eda 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -30,16 +30,16 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
-          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
-          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache
+          docker-compose -f docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
+          docker-compose -f docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
+          docker-compose -f docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache

       # Add additional steps here for testing or other actions

       - name: Clean up Docker Compose
         run: |
-          docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/calcium_imaging_worker/dist/debian/docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build down
-          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/spike_sorting_local_worker/dist/debian/docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker down
-          # docker-compose --env-file=${{secrets.DOCKERENV}} -f /docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build down
+          docker-compose -f docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build down
+          docker-compose -f docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker down
+          docker-compose -f docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build down

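Note on patches 05-07: `/docker/...` is an absolute path at the root of the runner's filesystem, and the bare file names in patch 06 are looked up in the step's current directory, so neither points at the checked-out repository. `run` steps start in the repository workspace, so the checkout-relative `./docker/...` form that patch 07 switches to resolves correctly. An equivalent alternative, sketched here rather than taken from the repository, is to set the step's working directory and keep the short file name:

    - name: Build standard worker (sketch)
      working-directory: ./docker/standard_worker/dist/debian
      run: |
        docker-compose -f docker-compose-standard_worker.yaml \
          -p sabatini-datajoint-pipeline_standard build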
From 6ddc30cb9eb7c49d6b36445d476125a1f2cd5d8f Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:01:47 -0500
Subject: [PATCH 07/11] attempt #?

---
 .github/workflows/docker_test.yml | 24 ++++++++++++++++--------
 1 file changed, 16 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index c453eda..cd28285 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -17,6 +17,20 @@

 jobs:
   build_docker:
+    env:
+      JHUB_VER: 1.4.2
+      PY_VER: 3.9
+      DIST: debian
+      WORKFLOW_VERSION: 0.1.0
+      REPO_OWNER: bernardosabatinilab
+      REPO_NAME: sabatini-datajoint-pipeline
+      CONTAINER_USER: anaconda
+      DJ_HOST: sabatini-dj-prd01.cluster-cjvmzxer50q5.us-east-1.rds.amazonaws.com
+      DJ_USER: jbw25
+      DJ_PASS: ${{ secrets.DOCKERENV }}
+      DATABASE_PREFIX: sabatini_dj_
+      RAW_ROOT_DATA_DIR: /home/${CONTAINER_USER}/inbox
+      PROCESSED_ROOT_DATA_DIR: /home/${CONTAINER_USER}/outbox
     runs-on: ubuntu-20.04

     steps:
@@ -30,16 +44,10 @@

       - name: Build and run Docker Compose
         run: |
-          docker-compose -f docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build --no-cache
-          docker-compose -f docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker build --no-cache
-          docker-compose -f docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build --no-cache
-
-      # Add additional steps here for testing or other actions
+          docker-compose -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build

       - name: Clean up Docker Compose
         run: |
-          docker-compose -f docker-compose-calcium_imaging_worker.yaml -p sabatini-datajoint-pipeline_calcium_imaging_worker build down
-          docker-compose -f docker-compose-spike_sorting_local_worker.yaml -p sabatini-datajoint-pipeline_spike_sorting_local_worker down
-          docker-compose -f docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build down
+          docker-compose -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml down


From 53a86c84dd33f756918808301f9d3e0a846c6a97 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:13:01 -0500
Subject: [PATCH 08/11] attempt #8

---
 .github/workflows/docker_test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index cd28285..b380c54 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -29,8 +29,8 @@
       DJ_USER: jbw25
       DJ_PASS: ${{ secrets.DOCKERENV }}
       DATABASE_PREFIX: sabatini_dj_
-      RAW_ROOT_DATA_DIR: /home/${CONTAINER_USER}/inbox
-      PROCESSED_ROOT_DATA_DIR: /home/${CONTAINER_USER}/outbox
+      RAW_ROOT_DATA_DIR: ./docker
+      PROCESSED_ROOT_DATA_DIR: ./docker
     runs-on: ubuntu-20.04

     steps:

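Note on patches 07-10: they mainly iterate on the two data-root variables in the job-level env block. Because these are ordinary environment variables visible to every step, one cheap way to debug this kind of churn is to print them before the build; a sketch, not part of the original workflow:

    - name: Show resolved data roots (sketch)
      run: |
        echo "RAW_ROOT_DATA_DIR=$RAW_ROOT_DATA_DIR"
        echo "PROCESSED_ROOT_DATA_DIR=$PROCESSED_ROOT_DATA_DIR"

Also worth noting, as an observation rather than something the patches state: a value such as /home/${CONTAINER_USER}/inbox is passed through literally, since neither the Actions runner nor the shell expands ${CONTAINER_USER} inside another variable's value, which is presumably part of what the following two commits are wrestling with.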
From f322c811b74b7f6e21f249bbb803b4493cc9b3c6 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:16:33 -0500
Subject: [PATCH 09/11] attempt..again

---
 .github/workflows/docker_test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index b380c54..dddb39a 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -29,8 +29,8 @@
       DJ_USER: jbw25
       DJ_PASS: ${{ secrets.DOCKERENV }}
       DATABASE_PREFIX: sabatini_dj_
-      RAW_ROOT_DATA_DIR: ./docker
-      PROCESSED_ROOT_DATA_DIR: ./docker
+      RAW_ROOT_DATA_DIR: ./docker/standard_worker/dist/debian/Inbox
+      PROCESSED_ROOT_DATA_DIR: ./docker/standard_worker/dist/debian/Outbox
     runs-on: ubuntu-20.04

     steps:

From 45768563a1913ac69072ceac107c73c786a94d06 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:18:24 -0500
Subject: [PATCH 10/11] and again...

---
 .github/workflows/docker_test.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
index dddb39a..cd28285 100644
--- a/.github/workflows/docker_test.yml
+++ b/.github/workflows/docker_test.yml
@@ -29,8 +29,8 @@
       DJ_USER: jbw25
       DJ_PASS: ${{ secrets.DOCKERENV }}
       DATABASE_PREFIX: sabatini_dj_
-      RAW_ROOT_DATA_DIR: ./docker/standard_worker/dist/debian/Inbox
-      PROCESSED_ROOT_DATA_DIR: ./docker/standard_worker/dist/debian/Outbox
+      RAW_ROOT_DATA_DIR: /home/${CONTAINER_USER}/inbox
+      PROCESSED_ROOT_DATA_DIR: /home/${CONTAINER_USER}/outbox
     runs-on: ubuntu-20.04

     steps:

From c8da4554e30fb40a5753ce452f12abfce3021e95 Mon Sep 17 00:00:00 2001
From: jbwallace123 <41006280+jbwallace123@users.noreply.github.com>
Date: Mon, 22 Jan 2024 14:41:33 -0500
Subject: [PATCH 11/11] update docs

---
 .github/workflows/docker_test.yml | 53 -------------------------------
 docs/source/How To.rst            | 31 ++++++++++++++----------
 2 files changed, 17 insertions(+), 67 deletions(-)
 delete mode 100644 .github/workflows/docker_test.yml

diff --git a/.github/workflows/docker_test.yml b/.github/workflows/docker_test.yml
deleted file mode 100644
index cd28285..0000000
--- a/.github/workflows/docker_test.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-name: build_docker
-
-on:
-  push:
-    branches:
-      - main
-      - dev
-  #pull_request:
-  #  branches:
-  #    - main
-  #    - dev
-  workflow_call:
-    secrets:
-      DOCKERENV:
-        required: true
-        description: 'Access to AWS RDS server'
-
-jobs:
-  build_docker:
-    env:
-      JHUB_VER: 1.4.2
-      PY_VER: 3.9
-      DIST: debian
-      WORKFLOW_VERSION: 0.1.0
-      REPO_OWNER: bernardosabatinilab
-      REPO_NAME: sabatini-datajoint-pipeline
-      CONTAINER_USER: anaconda
-      DJ_HOST: sabatini-dj-prd01.cluster-cjvmzxer50q5.us-east-1.rds.amazonaws.com
-      DJ_USER: jbw25
-      DJ_PASS: ${{ secrets.DOCKERENV }}
-      DATABASE_PREFIX: sabatini_dj_
-      RAW_ROOT_DATA_DIR: /home/${CONTAINER_USER}/inbox
-      PROCESSED_ROOT_DATA_DIR: /home/${CONTAINER_USER}/outbox
-    runs-on: ubuntu-20.04
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-
-      - name: Set up Docker Compose
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y docker-compose
-
-      - name: Build and run Docker Compose
-        run: |
-          docker-compose -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml -p sabatini-datajoint-pipeline_standard build
-
-      - name: Clean up Docker Compose
-        run: |
-          docker-compose -f ./docker/standard_worker/dist/debian/docker-compose-standard_worker.yaml down
-
-
diff --git a/docs/source/How To.rst b/docs/source/How To.rst
index f1372e9..5221818 100644
--- a/docs/source/How To.rst
+++ b/docs/source/How To.rst
@@ -8,7 +8,7 @@ If you are new to DataJoint, we recommend getting started by learning about the
 More information can be found in the `DataJoint documentation `_.

 We can run the workflow using the provided docker containers (for more information :doc:`WorkerDeployment`). Or, we can
-run locally using the `provided jupyter notebooks `_.
+run locally using the `provided jupyter notebooks `_. These notebooks provide a good starting point and can be modified to fit your needs,
 just remember to check that your kernel is set to the ``sabatini-datajoint`` kernel.

@@ -336,19 +336,25 @@ You can also run the pipeline manually by running the following:
 Ephys pipeline
 ##############
 The ephys pipeline is designed to process neuropixel data acquired with SpikeGLX. It will run through Kilosort2.5 and use
-`ecephys `_ for post-processing.
-The ``/Outbox`` directory will be automatically populated with the processed data.
+`ecephys `_ for post-processing. Currently, we have two workflows for processing the data:
+a docker container or a manual pipeline through the provided jupyter notebook.

 Input data
 ----------
 You will need all of the output files from SpikeGLX: ``.ap.bin``, ``.lf.bin``, ``.ap.meta``, and ``.lf.meta``. You can also use data that you have pre-processed through CatGT.

-Running the ephys pipeline
---------------------------
+Running the ephys pipeline through the docker container
+-------------------------------------------------------
 Once you have inserted the ``Subject``, ``Session``, and ``SessionDirectory`` tables and you have the appropriate files in place, you can then proceed
 with running the ephys pipeline by simply upping the spike_sorting_local_worker docker container detailed in :doc:`WorkerDeployment`.
+It will automatically detect the new data and process it and populate the ``EphysRecording``, ``CuratedClustering``, ``WaveformSet``, and ``LFP`` tables.
+
+Running the ephys pipeline manually
+-----------------------------------
+We have provided an ephys jupyter notebook that will guide you through the ephys pipeline. Importantly, you will have to configure your spike sorter
+of choice and the paths to the data in the notebook.

-Using the docker container is the recommended way to run the pipeline. If you must run the pipeline manually, please contact the database manager.
+`Ephys jupyter notebook `_.

 Table organization
 ------------------
@@ -380,25 +386,22 @@
 The calcium imaging processing pipeline will populate the ``imaging`` table.


 DeepLabCut pipeline
 ###################
-The DeepLabCut pipeline is designed to process videos through DeepLabCut. It will automatically populate the ``/Outbox`` directory with the processed data.
-
-**Important Note**: This pipeline assumes that you have already created a DeepLabCut project and have a trained network. If you have not done this, please
-refer to the `DeepLabCut documentation `_.
+The DeepLabCut pipeline is designed to process and annotate videos through DeepLabCut. We have updated the workflow so that you can run DeepLabCut from
+beginning to end through the provided jupyter notebook.

 Input data
 ----------
-You will need a pretrained network organized in the following format: ``/Inbox/dlc_projects/PROJECT_PATH``. You will also need to have the videos you would like to process
+Once you have created your ``project_folder``, it is important that you place it in ``/Inbox/dlc_projects/PROJECT_PATH``. You will also need to have the videos you would like to process
 organized in the following format: ``/Inbox/Subject/dlc_behavior_videos/*.avi``.

 Running the DeepLabCut pipeline
 -------------------------------
-This is a manual pipeline. You will need to run the provided `DeepLabCut jupyter notebook `_.
+This is a manual pipeline. You will need to run the provided ``_.
 You will need to edit all of the relevant information and paths in the notebook.

 Table organization
 ------------------
-The DeepLabCut processing pipeline will populate the ``model`` table.
-
+The DeepLabCut processing pipeline will populate the ``model`` and ``train`` tables.
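The documentation changes above describe a fixed input layout for the DeepLabCut pipeline. Purely as an illustration of what those paths imply (the project and subject names below are placeholders, not from the repository):

    /Inbox/
      dlc_projects/
        my_dlc_project/            # your existing DeepLabCut project folder
      subject01/
        dlc_behavior_videos/
          session1.avi             # videos to be processed and annotated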