Merge branch 'main' into add-Xulu-behavior

calderast authored Jan 8, 2025
2 parents 4b64680 + 1388254 commit 7086627
Showing 32 changed files with 856 additions and 96 deletions.
4 changes: 4 additions & 0 deletions .git_archival.txt
@@ -0,0 +1,4 @@
node: $Format:%H$
node-date: $Format:%cI$
describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
ref-names: $Format:%D$
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
.git_archival.txt export-subst
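Together, these two new files let version-detection tools work from GitHub's auto-generated source archives: the `export-subst` attribute tells `git archive` to expand the `$Format:...$` placeholders in `.git_archival.txt`, so hatch-vcs (via setuptools-scm) can recover commit and tag information even without a `.git` directory. An exported archive would then contain something like this (values fabricated for illustration):

```
node: 1f2e3d4c5b6a708192a3b4c5d6e7f80910203040
node-date: 2025-01-08T10:00:00-08:00
describe-name: v0.2.0-14-g1f2e3d4
ref-names: HEAD -> main
```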
14 changes: 14 additions & 0 deletions .github/workflows/codespell.yml
@@ -0,0 +1,14 @@
name: Codespell
on:
pull_request:
workflow_dispatch:

jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Codespell
uses: codespell-project/actions-codespell@v2
13 changes: 13 additions & 0 deletions .github/workflows/ruff.yml
@@ -0,0 +1,13 @@
name: Ruff
on:
pull_request:
workflow_dispatch:

jobs:
ruff:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Run ruff
uses: astral-sh/ruff-action@v3
162 changes: 162 additions & 0 deletions .github/workflows/test_package_build.yml
@@ -0,0 +1,162 @@
name: Test building package and publish

on:
push:
branches:
- main
- maint/*
tags:
- "*"
pull_request:
branches:
- main
- maint/*
defaults:
run:
shell: bash
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3

- name: Build sdist and wheel
run: |
pip install --upgrade build twine
python -m build
twine check dist/*
- name: Upload sdist and wheel artifacts
uses: actions/upload-artifact@v4
with:
name: dist
path: dist/

- name: Build git archive
run: mkdir archive && git archive -v -o archive/archive.tgz HEAD

- name: Upload git archive artifact
uses: actions/upload-artifact@v4
with:
name: archive
path: archive/

- name: Download test data
env:
BOX_USERNAME: ${{ secrets.BOX_USERNAME }}
BOX_PASSWORD: ${{ secrets.BOX_PASSWORD }}
run: |
python tests/download_test_data.py
tree tests/test_data
- name: Upload test data artifact
uses: actions/upload-artifact@v4
with:
name: test_data
path: tests/test_data/downloaded

test-package:
runs-on: ubuntu-latest
needs: [build]
strategy:
matrix:
package: ['wheel', 'sdist', 'archive', 'editable']
steps:
- name: Checkout repo
# Used to access the tests. Only install from source if matrix.package == 'editable'.
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Download sdist and wheel artifacts
if: matrix.package == 'wheel' || matrix.package == 'sdist'
uses: actions/download-artifact@v4
with:
name: dist
path: dist/

- name: Download git archive artifact
if: matrix.package == 'archive'
uses: actions/download-artifact@v4
with:
name: archive
path: archive/

- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"

- name: Display Python version
run: python -c "import sys; print(sys.version)"

- name: Update pip
run: pip install --upgrade pip

- name: Install wheel
if: matrix.package == 'wheel'
run: pip install dist/*.whl

- name: Install sdist
if: matrix.package == 'sdist'
run: pip install dist/*.tar.gz

- name: Install archive
if: matrix.package == 'archive'
run: pip install archive/archive.tgz

- name: Install editable
if: matrix.package == 'editable'
run: pip install -e .

- name: Download test data artifact
uses: actions/download-artifact@v4
with:
name: test_data
path: tests/test_data/downloaded

- name: Run tests without coverage
if: matrix.package != 'editable'
run: |
pip install pytest
pip list
pytest -v
- name: Run tests on editable install with coverage
if: matrix.package == 'editable'
run: |
pip install pytest-cov
pip list
pytest --cov=src --cov-report=xml --cov-report=term -v
- name: Upload coverage reports to Codecov
if: matrix.package == 'editable'
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

# pypi-publish:
# name: Upload release to PyPI
# runs-on: ubuntu-latest
# needs: [test-package]
# environment:
# name: pypi
# url: https://pypi.org/p/jdb-to-nwb
# permissions:
# id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
# steps:
# - uses: actions/download-artifact@v4
# with:
# name: dist
# path: dist/
# - name: Publish package distributions to PyPI
# uses: pypa/gh-action-pypi-publish@release/v1
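Note: the `test-package` job installs the package four different ways (wheel, sdist, git archive, editable) to catch packaging-specific breakage, and coverage is only collected on the editable install, where the `src` layout maps directly onto the working tree. The commented-out `pypi-publish` job follows PyPA's trusted-publishing pattern: the `id-token: write` permission lets `pypa/gh-action-pypi-publish` authenticate to PyPI via OIDC rather than a stored API token, and the `if:` condition restricts publishing to pushed tags.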
5 changes: 3 additions & 2 deletions .gitignore
@@ -169,7 +169,8 @@ _version.py

# Large test data
tests/test_data/photometry/*
tests/test_data/raw_ephys/*
tests/test_data/processed_ephys/*
tests/test_data/behavior/IM-1478*
tests/test_data/downloaded/*

# Box credentials
.env
32 changes: 32 additions & 0 deletions README.md
@@ -26,6 +26,38 @@ cp tests/metadata_full.yaml .
jdb_to_nwb metadata_full.yaml out.nwb
```

## Downloading test data

The large test data files are stored in a shared UCSF Box account. To get access to the test data,
please contact the repo maintainers.

Create a new file called `.env` in the root directory of the repository and add your Box credentials:
```bash
BOX_USERNAME=<your_box_username>
BOX_PASSWORD=<your_box_password>
```
Or set the environment variables in your shell:
```bash
export BOX_USERNAME=<your_box_username>
export BOX_PASSWORD=<your_box_password>
```

Then run the download script:
```bash
python tests/download_test_data.py
```
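
For reference, a minimal sketch of how a script like `tests/download_test_data.py` might resolve these credentials (assuming it uses `python-dotenv` to read `.env`; the actual script may differ):

```python
# Hypothetical credential resolution for the Box download script.
# Assumes python-dotenv is installed; names BOX_USERNAME/BOX_PASSWORD match the README above.
import os

from dotenv import load_dotenv

load_dotenv()  # merges .env (if present) into the process environment
username = os.environ["BOX_USERNAME"]  # raises KeyError if unset, failing fast
password = os.environ["BOX_PASSWORD"]
```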

Notes:
- Run `python tests/test_data/create_raw_ephys_test_data.py` to re-create the test data for `raw_ephys`.
- Run `python tests/test_data/create_processed_ephys_test_data.py` to re-create the test data for `processed_ephys`.
- `tests/test_data/processed_ephys/impedance.csv` was manually created for testing purposes.
- `tests/test_data/processed_ephys/geom.csv` was manually created for testing purposes.
- Some files (`settings.xml`, `structure.oebin`) nested within `tests/test_data/raw_ephys/2022-07-25_15-30-00`
were manually created for testing purposes.

The GitHub Actions workflow (`.github/workflows/test_package_build.yml`) will automatically download the test data and run the tests.


## Versioning

Versioning is handled automatically by [hatch-vcs](https://github.com/ofek/hatch-vcs), based on the latest
2 changes: 1 addition & 1 deletion notebook/read_phot_box_files.ipynb
@@ -721,7 +721,7 @@
"print(sig2)\n",
"print(OG_signals['sig2'])\n",
"\n",
"# print(f\"Lenth of visits: {len(visits)}\")\n",
"# print(f\"Length of visits: {len(visits)}\")\n",
"# print(visits)\n",
"\n",
"import matplotlib.pyplot as plt\n",
8 changes: 5 additions & 3 deletions pyproject.toml
@@ -25,10 +25,11 @@ classifiers = [
dependencies = [
"spikeinterface >= 0.101.0",
"tqdm",
"neuroconv == 0.6.0",
"neuroconv == 0.6.5",
"pynwb >= 2.8.1",
"ndx_fiber_photometry",
"ndx_franklab_novela",
"scikit-learn",
]
dynamic = ["version"]

@@ -40,6 +41,7 @@ dev = [
"ruff",
"codespell",
]
test = ["pytest", "pytest-cov"]

[project.urls]
"Homepage" = "https://github.com/calderast/jdb_to_nwb/"
@@ -79,7 +81,7 @@ exclude = [

[tool.codespell]
skip = "htmlcov,.git,.mypy_cache,.pytest_cache,.coverage,*.pdf,*.svg,venvs"
ignore-words-list = "datas,assertIn"
ignore-words-list = "datas,assertIn,lockin"

[tool.coverage.run]
branch = true
@@ -97,7 +99,7 @@ preview = true
exclude = ".git|.mypy_cache|.tox|.venv|venv|.ipynb_checkpoints|_build/|dist/|__pypackages__|.ipynb"

[tool.ruff]
lint.select = ["E", "F", "T100", "T201", "T203"]
lint.select = ["E", "F", "T100", "T203"]
exclude = [
".git",
".tox",
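Note: in Ruff's `flake8-print` ruleset, `T201` flags `print` calls and `T203` flags `pprint`; dropping `T201` from `lint.select` permits the `print` status messages added to `convert.py` below, while `pprint` calls remain flagged.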
5 changes: 5 additions & 0 deletions src/jdb_to_nwb/__init__.py
@@ -0,0 +1,5 @@
from ._version import __version__

__all__ = [
"__version__",
]
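`_version.py` is generated at build time by hatch-vcs (note that it is ignored in `.gitignore` above), so it does not exist in a fresh checkout until the package is installed. A common defensive variant, shown purely as a sketch and not part of this commit, tolerates that:

```python
# Sketch only: fall back gracefully when _version.py has not been generated yet.
try:
    from ._version import __version__
except ImportError:  # running from an unbuilt source tree
    __version__ = "0.0.0+unknown"
```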
41 changes: 34 additions & 7 deletions src/jdb_to_nwb/convert.py
@@ -3,9 +3,11 @@
import yaml

from pynwb import NWBFile, NWBHDF5IO
from pynwb.file import Subject
from datetime import datetime
from dateutil import tz

from . import __version__
from .convert_raw_ephys import add_raw_ephys
from .convert_spikes import add_spikes
from .convert_behavior import add_behavior
@@ -19,19 +21,44 @@ def create_nwbs(
with open(metadata_file_path, "r") as f:
metadata = yaml.safe_load(f)

# parse subject metadata
subject = Subject(**metadata["subject"])

# parse surgery metadata
surgery = "..." # TODO parse from structured metadata

# TODO: read these from metadata
nwbfile = NWBFile(
session_description="Mock session",
session_start_time=datetime.now(tz.tzlocal()),
identifier="mock_session",
session_description="Mock session", # TODO: generate this from metadata
session_start_time=datetime.now(tz.tzlocal()), # TODO: update this
identifier="mock_session", # TODO: update this
session_id=metadata.get("session_id"),
surgery=surgery,
notes=metadata.get("notes"),
experimenter=metadata.get("experimenter"),
institution=metadata.get("institution"),
lab=metadata.get("lab"),
keywords=metadata.get("keywords"),
experiment_description=metadata.get("experiment_description"),
related_publications=metadata.get("related_publications"),
subject=subject,
source_script="jdb_to_nwb " + __version__,
source_script_file_name="convert.py",
)

# if photometry is present, timestamps should be aligned to the photometry
add_photometry(nwbfile=nwbfile, metadata=metadata)
photometry_start_in_arduino_time = add_behavior(nwbfile=nwbfile, metadata=metadata)

[CI annotation] GitHub Actions / ruff — Ruff (F841), src/jdb_to_nwb/convert.py:51: Local variable `photometry_start_in_arduino_time` is assigned to but never used.

add_raw_ephys(nwbfile=nwbfile, metadata=metadata)
add_spikes(nwbfile=nwbfile, metadata=metadata)
photometry_start_in_arduino_time = add_behavior(nwbfile=nwbfile, metadata=metadata)
add_photometry(nwbfile=nwbfile, metadata=metadata)

print(f"Writing file, including iterative read from raw ephys data...")
# TODO: time alignment

# reset the session start time to the earliest of the data streams
nwbfile.fields["session_start_time"] = datetime.now(tz.tzlocal())

print("Writing file, including iterative read from raw ephys data...")

with NWBHDF5IO(output_nwb_file_path, mode="w") as io:
io.write(nwbfile)
@@ -45,4 +72,4 @@ def cli():
parser.add_argument("output_nwb_file_path", type=Path, help="Path to the output NWB file.")
args = parser.parse_args()

create_nwbs(args.metadata_file_path, args.output_nwb_file_path)
create_nwbs(args.metadata_file_path, args.output_nwb_file_path)
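For context, `Subject(**metadata["subject"])` expects the metadata YAML to provide a `subject` mapping whose keys match pynwb's `Subject` fields. A hypothetical fragment (values invented for illustration, not taken from this repo's test metadata):

```python
# Hypothetical subject metadata as yaml.safe_load would return it;
# keys follow pynwb's Subject schema, values are made up.
from pynwb.file import Subject

metadata = {
    "subject": {
        "subject_id": "rat123",          # animal identifier
        "species": "Rattus norvegicus",  # Latin binomial, per NWB best practices
        "sex": "M",                      # "M", "F", "U", or "O"
        "age": "P6M",                    # ISO 8601 duration (6 months)
        "description": "Long Evans rat",
    }
}
subject = Subject(**metadata["subject"])
```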
4 changes: 2 additions & 2 deletions src/jdb_to_nwb/convert_behavior.py
@@ -22,8 +22,8 @@ def load_maze_configurations(maze_configuration_file_path: Path):
maze = set(map(int, line.strip().split(",")))
maze_sequence.append(maze)
return maze_sequence
except:
warnings.warn(f"Could not load maze configurations from {maze_configuration_file_path}")
except Exception as e:
warnings.warn(f"Could not load maze configurations from {maze_configuration_file_path}: {e}")
return []


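Pieced together from the hunk above, the whole function reads one maze per line as a comma-separated set of integers; roughly (a reconstruction, not the verbatim source):

```python
import warnings
from pathlib import Path


def load_maze_configurations(maze_configuration_file_path: Path):
    """Load a sequence of maze configurations, one comma-separated set per line."""
    maze_sequence = []
    try:
        with open(maze_configuration_file_path, "r") as maze_file:
            for line in maze_file:
                # A line like "5,12,19" becomes the set {5, 12, 19}
                maze = set(map(int, line.strip().split(",")))
                maze_sequence.append(maze)
        return maze_sequence
    except Exception as e:
        warnings.warn(f"Could not load maze configurations from {maze_configuration_file_path}: {e}")
        return []
```

Catching `Exception` instead of the old bare `except:` avoids swallowing `KeyboardInterrupt` and `SystemExit`, and binding the exception lets the warning report what actually went wrong.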
2 changes: 1 addition & 1 deletion src/jdb_to_nwb/convert_photometry.py
@@ -308,7 +308,7 @@ def airPLS(data, lambda_=1e8, max_iterations=50):
"""

num_data_points = data.shape[0]
weights = np.ones(num_data_points) # Set the intial weights to 1 to treat all points equally
weights = np.ones(num_data_points) # Set the initial weights to 1 to treat all points equally

# Loop runs up to 'max_iterations' times to adjust the weights and fit the baseline
for i in range(1, max_iterations + 1):
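The touched comment sits inside `airPLS` (adaptive iteratively reweighted penalized least squares, Zhang et al. 2010), which estimates the photometry baseline by repeatedly fitting a Whittaker smoother and zeroing the weight of points above the fit. A compact sketch of the standard algorithm, slightly simplified relative to the published version and not necessarily line-for-line what this module does:

```python
import numpy as np
from scipy import sparse
from scipy.sparse.linalg import spsolve


def whittaker_smooth(data, weights, lambda_):
    """Penalized least squares fit with a second-order difference penalty."""
    m = data.shape[0]
    # Second-order difference matrix: row i computes x[i] - 2*x[i+1] + x[i+2]
    D = sparse.diags([1.0, -2.0, 1.0], [0, 1, 2], shape=(m - 2, m))
    W = sparse.diags(weights)
    A = (W + lambda_ * (D.T @ D)).tocsc()
    return spsolve(A, weights * data)


def airPLS_sketch(data, lambda_=1e8, max_iterations=50):
    """Iteratively reweight so peaks are ignored and the fit hugs the baseline."""
    weights = np.ones(data.shape[0])
    baseline = np.zeros_like(data, dtype=float)
    for i in range(1, max_iterations + 1):
        baseline = whittaker_smooth(data, weights, lambda_)
        residual = data - baseline
        negative_sum = np.abs(residual[residual < 0].sum())
        if negative_sum < 0.001 * np.abs(data).sum():
            break  # converged: almost no mass remains below the baseline
        weights[residual >= 0] = 0  # points above the fit are treated as peaks
        weights[residual < 0] = np.exp(i * np.abs(residual[residual < 0]) / negative_sum)
    return baseline
```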
(Diffs for the remaining changed files are not shown in this view.)
