diff --git a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_behaviorinterface.py b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_behaviorinterface.py
index a90e4f5..863b285 100644
--- a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_behaviorinterface.py
+++ b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_behaviorinterface.py
@@ -5,6 +5,7 @@
 from h5py import File
 from hdmf.common.table import DynamicTableRegion
 from pynwb.behavior import BehavioralTimeSeries, TimeSeries
+from pynwb.device import Device
 from ndx_events import EventTypesTable, EventsTable, Task, TimestampVectorData
 from neuroconv.basedatainterface import BaseDataInterface

@@ -29,6 +30,12 @@ def get_metadata(self) -> DeepDict:
     def get_metadata_schema(self) -> dict:
         metadata_schema = super().get_metadata_schema()
         metadata_schema["properties"]["Behavior"] = get_base_schema(tag="Behavior")
+        metadata_schema["properties"]["Behavior"]["properties"]["Module"] = {
+            "properties": {
+                "name": {"type": "string"},
+                "description": {"type": "string"},
+            },
+        }
         metadata_schema["properties"]["Behavior"]["properties"]["TimeSeries"] = {
             "type": "array",
             "items": {
@@ -59,6 +66,17 @@
                 },
             },
         }
+        metadata_schema["properties"]["Behavior"]["properties"]["Devices"] = {
+            "type": "array",
+            "items": {
+                "type": "object",
+                "properties": {
+                    "name": {"type": "string"},
+                    "description": {"type": "string"},
+                    "manufacturer": {"type": "string"},
+                },
+            },
+        }
         return metadata_schema

     def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
@@ -92,8 +110,8 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict):
         # Add Data to NWBFile
         behavior_module = nwb_helpers.get_module(
             nwbfile=nwbfile,
-            name="behavior",
-            description="Behavioral data from the experiment.",
+            name=metadata["Behavior"]["Module"]["name"],
+            description=metadata["Behavior"]["Module"]["description"],
         )

         # Add BehavioralTimeSeries
@@ -148,3 +166,8 @@

         task = Task(event_types=event_types_table)
         nwbfile.add_lab_meta_data(task)
+
+        # Add Devices
+        for device_kwargs in metadata["Behavior"]["Devices"]:
+            device = Device(**device_kwargs)
+            nwbfile.add_device(device)
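Review note: a minimal, runnable sketch of the new metadata-driven path, for context. The in-memory `NWBFile` and its session fields are placeholders, and the `metadata` dict below only mirrors the `Module`/`Devices` structure this PR adds to the YAML (descriptions shortened); it is not the converter's actual output.

```python
from datetime import datetime, timezone
from uuid import uuid4

from pynwb import NWBFile
from pynwb.device import Device
from neuroconv.tools import nwb_helpers

# Placeholder NWBFile; a real conversion populates these fields from metadata.
nwbfile = NWBFile(
    session_description="placeholder",
    identifier=str(uuid4()),
    session_start_time=datetime(2024, 1, 1, tzinfo=timezone.utc),
)

# Mirrors the structure added to schneider_2024_metadata.yaml in this PR.
metadata = {
    "Behavior": {
        "Module": {"name": "behavior", "description": "Lever-press task with acoustic feedback."},
        "Devices": [
            {
                "name": "rotary_encoder",
                "description": "H5 Ball Bearing Optical Shaft Encoder",
                "manufacturer": "US Digital",
            },
        ],
    },
}

# The processing module name/description now come from metadata instead of hard-coded strings.
behavior_module = nwb_helpers.get_module(
    nwbfile=nwbfile,
    name=metadata["Behavior"]["Module"]["name"],
    description=metadata["Behavior"]["Module"]["description"],
)

# Each Devices entry maps one-to-one onto pynwb's Device constructor.
for device_kwargs in metadata["Behavior"]["Devices"]:
    nwbfile.add_device(Device(**device_kwargs))

print(list(nwbfile.devices))  # ['rotary_encoder']
```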
diff --git a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_metadata.yaml b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_metadata.yaml
index 48cedfe..f6c0823 100644
--- a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_metadata.yaml
+++ b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_metadata.yaml
@@ -25,18 +25,21 @@ Ecephys:
   ElectrodeGroup:
     - name: ElectrodeGroup
       description: ElectrodeGroup for all channels in the recording probe.
-      location: Auditory Cortex
+      location: Primary Auditory Cortex (A1)
       device: MasmanidisSiliconMicroprobe128AxN
   ElectricalSeries:
     - name: ElectricalSeries
      description: Recording of AC neural responses in mice performing this behavioral task will utilize dense 128-channel recording probes (Masmanidis Lab). These recording probes span a depth ~1mm, allowing for sampling of all layers of cortex. Electrophysiology data will be recorded using OpenEphys Acquisition Board v2.4 and associated OpenEphys GUI software.

 Behavior:
+  Module:
+    name: behavior
+    description: C57BL/6 mice will first be water restricted, habituated to head fixation in the behavioral setup for two days, and classically conditioned to associate a 16 kHz tone with a small water reward given 150 ms after the tone plays (~12 second inter-tone interval). Mice will then be trained for 15 to 20 sessions on an auditory-guided task described as follows. Inspired by human performance on stringed instruments, whereby a target note is achieved via modulation of forelimb and hand movements, we have engineered a novel behavioral paradigm that requires mice to skillfully adjust the size of lever presses in response to a dynamic virtual acoustic environment. Mice are trained to press a small 3-D printed lever forward with their forelimb toward a 2 mm wide target zone. Mice hear a 16 kHz tone when the lever enters the zone and a 10 kHz tone if the press exceeds the bounds of the zone. Presses that peak within the zone produce only the entry tone and are rewarded when the lever returns to the starting position. Presses that undershoot (producing no tones) or overshoot (producing both an entry and an exit tone) are unrewarded. Every 30 trials, which we refer to as a block, the target zone is relocated without warning and the mice must use acoustic feedback to adjust their lever presses to peak at the new location.
   TimeSeries:
     - name: encoder
-      description: Sampled values for entire duration of experiment for lever pressing/treadmill behavior read from a quadrature encoder.
+      description: Sampled values for entire duration of experiment for lever pressing behavior read from a rotary encoder (US Digital). Digital signals for licking and lever movement were collected by a data acquisition card (National Instruments) connected to a computer, logged by custom Matlab software (Mathworks, PsychToolBox), and sampled at 2 kHz.
     - name: lick
-      description: Samples values for entire duration of experiment for voltage signal readout from an infrared/capacitive) lickometer sensor.
+      description: Sampled values for entire duration of experiment for voltage signal readout from a custom infrared/capacitive lickometer sensor (Schneider Lab). Digital signals for licking and lever movement were collected by a data acquisition card (National Instruments) connected to a computer, logged by custom Matlab software (Mathworks, PsychToolBox), and sampled at 2 kHz.
   Events:
     - name: target
       description: Time at which the target zone is entered during a press.
@@ -53,13 +56,19 @@
       description: Times at which tuning tones are played to an animal after a behavioral experiment during ephys recording sessions.
   VideoCamera1:
     - name: video_camera_1
-      description: Three IR video cameras (AAK CA20 600TVL 2.8MM) are used to monitor the experiments from different angles of interest, allowing for offline analysis of body movements, pupillometry, and other behavioral data if necessary.
+      description: Two IR video cameras (AAK CA20 600TVL 2.8MM) are used to monitor the experiments from different angles of interest, allowing for offline analysis of body movements, pupillometry, and other behavioral data if necessary. Camera 1 is a side-angle view of the mouse.
       unit: Frames
   VideoCamera2:
     - name: video_camera_2
-      description: Three IR video cameras (AAK CA20 600TVL 2.8MM) are used to monitor the experiments from different angles of interest, allowing for offline analysis of body movements, pupillometry, and other behavioral data if necessary.
+      description: Two IR video cameras (AAK CA20 600TVL 2.8MM) are used to monitor the experiments from different angles of interest, allowing for offline analysis of body movements, pupillometry, and other behavioral data if necessary. Camera 2 is a zoomed-in view of the pupil of the mouse.
       unit: Frames
+  Devices:
+    - name: rotary_encoder
+      description: H5 Ball Bearing Optical Shaft Encoder
+      manufacturer: US Digital
+    - name: lickometer
+      description: The lickometer comprises a custom-mounted (3D printed using Formlabs Form2) IR-beam emitter and receiver. The IR signal is titrated and pre-processed using a custom printed circuit board (designed by Melissa Caras and Dan Sanes) to generate a binary TTL signal with IR sensitivity controlled by a potentiometer.
+      manufacturer: Schneider Lab

 Sorting:
   units_description: Neural spikes will be sorted offline using Kilosort 2.5 and Phy2 software and manually curated to ensure precise spike time acquisition.
-
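Review note: the edited YAML can be sanity-checked against the schema fragment added in `get_metadata_schema` without running a conversion. A sketch, assuming it runs from the `schneider_2024` directory so the relative path resolves; `devices_schema` is the fragment duplicated inline for illustration.

```python
import jsonschema
from neuroconv.utils import load_dict_from_file

# Assumes the working directory contains the edited YAML.
metadata = load_dict_from_file("schneider_2024_metadata.yaml")

# Inline copy of the Devices fragment from get_metadata_schema(), for illustration.
devices_schema = {
    "type": "array",
    "items": {
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "description": {"type": "string"},
            "manufacturer": {"type": "string"},
        },
    },
}

# Raises jsonschema.ValidationError if, e.g., a manufacturer were a list instead of a string.
jsonschema.validate(instance=metadata["Behavior"]["Devices"], schema=devices_schema)
print([device["name"] for device in metadata["Behavior"]["Devices"]])  # ['rotary_encoder', 'lickometer']
```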
diff --git a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_notes.md b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_notes.md
index 60d1fb1..3852249 100644
--- a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_notes.md
+++ b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_notes.md
@@ -6,14 +6,9 @@
 - 'push' is basically some kind of trials table, but need descriptions for variables ex. ITI_respect?

 ## Video
-- In research overview, 3 cameras are mentioned, but only 2 appear in example data. Missing camera? Or incorrect description?

 ## Data Requests
 - Mice sexes
 - Remaining data for Grant's project
 - More detailed position info for recording probe
-  - Subfield of auditory cortex: A1? A2? AAF? etc.
-  - stereotactic coordinates of the whole probe
-- Detailed description of the behavioral paradigm
-- Description of lickometer and lever/treadmill quadrature encoder.
 - Detailed description of temporal alignment procedure.
diff --git a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_nwbconverter.py b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_nwbconverter.py
index 4653d19..5b1d40e 100644
--- a/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_nwbconverter.py
+++ b/src/schneider_lab_to_nwb/schneider_2024/schneider_2024_nwbconverter.py
@@ -19,5 +19,4 @@
         Behavior=Schneider2024BehaviorInterface,
         VideoCamera1=VideoInterface,
         VideoCamera2=VideoInterface,
-        VideoCamera3=VideoInterface,
     )
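Review note: with `VideoCamera3` removed, an end-to-end run wires exactly two video interfaces. A sketch of the usual neuroconv driver pattern under stated assumptions: the import path, the `file_path`/`file_paths` argument names, and every file name here are placeholders, and the ecephys interfaces are omitted for brevity.

```python
from neuroconv.utils import dict_deep_update, load_dict_from_file
from schneider_lab_to_nwb.schneider_2024 import Schneider2024NWBConverter  # assumed import path

# Keys must match data_interface_classes; all paths are placeholders.
source_data = dict(
    Behavior=dict(file_path="raw_behavior.mat"),  # assumed argument name
    VideoCamera1=dict(file_paths=["camera1.mp4"]),
    VideoCamera2=dict(file_paths=["camera2.mp4"]),
)
converter = Schneider2024NWBConverter(source_data=source_data)

# Merge the hand-edited YAML over the automatically extracted metadata.
metadata = converter.get_metadata()
metadata = dict_deep_update(metadata, load_dict_from_file("schneider_2024_metadata.yaml"))
# Real conversions also set NWBFile fields such as session_start_time before this call.

converter.run_conversion(nwbfile_path="sub-001_ses-001.nwb", metadata=metadata)
```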