From 6c3dd2253985655ea43c9efc4ec2e3b21ca63a74 Mon Sep 17 00:00:00 2001 From: bendichter Date: Tue, 19 Feb 2019 17:41:55 -0800 Subject: [PATCH 1/6] mention the order of dimensions in doc for DecompositionSeries.data and enforce shape --- src/pynwb/misc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/misc.py b/src/pynwb/misc.py index 3ecde6610..9589d912f 100644 --- a/src/pynwb/misc.py +++ b/src/pynwb/misc.py @@ -261,7 +261,7 @@ class DecompositionSeries(TimeSeries): @docval({'name': 'name', 'type': str, 'doc': 'The name of this TimeSeries dataset'}, {'name': 'data', 'type': ('array_data', 'data', TimeSeries), - 'doc': 'The data this TimeSeries dataset stores. Can also store binary data e.g. image frames'}, + 'doc': 'dims: num_times * num_channels * num_bands', 'shape': (None, None, None)}, {'name': 'description', 'type': str, 'doc': 'Description of this TimeSeries dataset'}, {'name': 'metric', 'type': str, 'doc': "metric of analysis. recommended: 'phase', 'amplitude', 'power'"}, {'name': 'unit', 'type': str, 'doc': 'SI unit of measurement', 'default': 'no unit'}, From 5ec61443b50b06d6e63be0c3be74a16c466b0bdb Mon Sep 17 00:00:00 2001 From: Ben Dichter Date: Wed, 20 Feb 2019 12:39:50 -0800 Subject: [PATCH 2/6] typo: Intracom -> Intracomm --- src/pynwb/form/backends/hdf5/h5tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/form/backends/hdf5/h5tools.py b/src/pynwb/form/backends/hdf5/h5tools.py index 5474b4648..3b1039cd1 100644 --- a/src/pynwb/form/backends/hdf5/h5tools.py +++ b/src/pynwb/form/backends/hdf5/h5tools.py @@ -33,7 +33,7 @@ class HDF5IO(FORMIO): {'name': 'manager', 'type': BuildManager, 'doc': 'the BuildManager to use for I/O', 'default': None}, {'name': 'mode', 'type': str, 'doc': 'the mode to open the HDF5 file with, one of ("w", "r", "r+", "a", "w-")'}, - {'name': 'comm', 'type': 'Intracom', + {'name': 'comm', 'type': 'Intracomm', 'doc': 'the MPI communicator to use for parallel I/O', 'default': None}, {'name': 'file', 'type': File, 'doc': 'a pre-existing h5py.File object', 'default': None}) def __init__(self, **kwargs): From 5b0cfbe00f42ab37c95edcbd2e04b55f095cdb15 Mon Sep 17 00:00:00 2001 From: bendichter Date: Thu, 7 Mar 2019 15:54:37 -0500 Subject: [PATCH 3/6] rearrange tutorial and improve consistency --- docs/gallery/general/file.py | 305 ++++++++++++++++++----------------- 1 file changed, 154 insertions(+), 151 deletions(-) diff --git a/docs/gallery/general/file.py b/docs/gallery/general/file.py index 54c739183..d25439839 100644 --- a/docs/gallery/general/file.py +++ b/docs/gallery/general/file.py @@ -18,12 +18,15 @@ from datetime import datetime from dateutil.tz import tzlocal from pynwb import NWBFile +import numpy as np start_time = datetime(2017, 4, 3, 11, tzinfo=tzlocal()) create_date = datetime(2017, 4, 15, 12, tzinfo=tzlocal()) -nwbfile = NWBFile('demonstrate NWBFile basics', 'NWB123', start_time, - file_create_date=create_date) +nwbfile = NWBFile(session_description='demonstrate NWBFile basics', # required + identifier='NWB123', # required + session_start_time=start_time, # required + file_create_date=create_date) # optional #################### # .. _basic_timeseries: @@ -33,7 +36,7 @@ # # PyNWB stores time series data using the :py:class:`~pynwb.base.TimeSeries` class and its subclasses. # The main components of a :py:class:`~pynwb.base.TimeSeries` are the *data* and the *timestamps*. -# You will also need to supply a *source* and *description* of the data and the unit for *data*. 
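+# Timestamps can either be given explicitly (as below) or, for regularly sampled data,
+# described with a ``starting_time`` and a sampling ``rate``. A minimal sketch of the
+# latter (the name ``'example_regular_ts'`` is only illustrative and is not used again
+# in this tutorial)::
+#
+#     from pynwb import TimeSeries
+#     example_regular_ts = TimeSeries(name='example_regular_ts',
+#                                     data=list(range(10)),
+#                                     unit='SIunit',
+#                                     starting_time=0.0,
+#                                     rate=10.0)
+#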
+# You will also need to supply the name and unit of measurement for *data*. from pynwb import TimeSeries @@ -60,26 +63,95 @@ nwbfile.add_acquisition(test_ts) +#################### +# Access the :py:class:`~pynwb.base.TimeSeries` object `'test_timeseries'` from *acquisition* using + +nwbfile.acquisition['test_timeseries'] +# or +nwbfile.get_acquisition('test_timeseries') + +#################### +# .. _basic_writing: +# +# Writing an NWB file +# ------------------- +# +# NWB I/O is carried out using the :py:class:`~pynwb.NWBHDF5IO` class [#]_. This class is responsible +# for mapping an :py:class:`~pynwb.file.NWBFile` object into HDF5 according to the NWB schema. +# +# To write an :py:class:`~pynwb.file.NWBFile`, use the :py:func:`~pynwb.form.backends.io.FORMIO.write` method. + +from pynwb import NWBHDF5IO + +io = NWBHDF5IO('basic_example.nwb', mode='w') +io.write(nwbfile) +io.close() + +#################### +# You can also use :py:func:`~pynwb.NWBHDF5IO` as a context manager: + +with NWBHDF5IO('basic_example.nwb', 'w') as io: + io.write(nwbfile) + +#################### +# .. _basic_reading: +# +# Reading an NWB file +# ------------------- +# +# As with writing, reading is also carried out using the :py:class:`~pynwb.NWBHDF5IO` class. +# To read the NWB file we just wrote, using construct another :py:class:`~pynwb.NWBHDF5IO` object, +# and use the :py:func:`~pynwb.form.backends.io.FORMIO.read` method to retrieve an +# :py:class:`~pynwb.file.NWBFile` object. + +io = NWBHDF5IO('basic_example.nwb', 'r') +nwbfile_in = io.read() + +#################### +# .. _basic_retrieving_data: +# +# Retrieving data from an NWB file +# -------------------------------- + +test_timeseries_in = nwbfile_in.acquisition['test_timeseries'] +print(test_timeseries_in) + +#################### +# Accessing the data field, you will notice that it does not return the data values, but instead a `h5py.Dataset`. + +print(test_timeseries_in.data) + +#################### +# This object lets you only read in a section of the dataset without reading the entire thing. + +print(test_timeseries_in.data[:2]) + +#################### +# To load the entire dataset, use `[:]`. + +print(test_timeseries_in.data[:]) +io.close() + +#################### +# If you use :py:class:`~pynwb.NWBHDF5IO` as a context manager during read, be aware that the +# :py:class:`~pynwb.NWBHDF5IO` gets closed and when the context completes and the data will not be +# available outside of the context manager[#]_. + #################### # .. _reuse_timestamps: # # Reusing timestamps # ~~~~~~~~~~~~~~~~~~ # -# When working with multimodal data, it can be convenient and efficient to store timestamps once and associate multiple +# When working with multi-modal data, it can be convenient and efficient to store timestamps once and associate multiple # data with the single timestamps instance. PyNWB enables this by letting you reuse timestamps across # :class:`~pynwb.base.TimeSeries` objects. To reuse a :class:`~pynwb.base.TimeSeries` timestamps in a new -# :class:`~pynwb.base.TimeSeries`, pass the exising :class:`~pynwb.base.TimeSeries` as the new +# :class:`~pynwb.base.TimeSeries`, pass the existing :class:`~pynwb.base.TimeSeries` as the new # :class:`~pynwb.base.TimeSeries` timestamps: data = list(range(101, 201, 10)) reuse_ts = TimeSeries('reusing_timeseries', data, 'SIunit', timestamps=test_ts) -#################### -# And then add it to the NWBFile. - -nwbfile.add_acquisition(reuse_ts) - #################### # .. 
_basic_data_interfaces: # @@ -88,21 +160,41 @@ # # NWB provides the concept of a *data interface*--an object for a standard # storage location of specific types of data--through the :py:class:`~pynwb.base.NWBDataInterface` class. -# For example, :py:class:`~pynwb.behavior.BehavioralTimeSeries` provides a container for holding one or more -# :py:class:`~pynwb.base.TimeSeries` objects that store time series behavioral data. By putting -# your behavioral data into a :py:class:`~pynwb.behavior.BehavioralTimeSeries` container, downstream users and -# tools know where to look to retrieve behavioral data. For a comprehensive list of available data interfaces, see the +# For example, :py:class:`~pynwb.behavior.Position` provides a container that holds one or more +# :py:class:`~pynwb.base.SpatialSeries` objects. :py:class:`~pynwb.base.SpatialSeries` is a subtype of +# :py:class:`~pynwb.base.TimeSeries` that represents the spatial position of an animal over time. By putting +# your position data into a :py:class:`~pynwb.behavior.Position` container, downstream users and +# tools know where to look to retrieve position data. For a comprehensive list of available data interfaces, see the # :ref:`overview page ` -# -# :py:class:`~pynwb.base.NWBDataInterface` objects can be added as acquisition data, or as members -# of a :ref:`ProcessingModule ` -# -# For the purposes of demonstration, we will use a :py:class:`~pynwb.ecephys.LFP` data interface. -from pynwb.behavior import BehavioralTimeSeries +from pynwb.behavior import Position + +position = Position() -bts = BehavioralTimeSeries() -nwbfile.add_acquisition(bts) +#################### +# You can add objects to a data interface as a method of the data interface, + +position.create_spatial_series(name='position1', + data=np.linspace(0, 1, 20), + rate=50., + reference_frame='starting gate') + +#################### +# or you can add pre-existing objects, + +from pynwb.behavior import SpatialSeries + +spatial_series = SpatialSeries(name='position2', + data=np.linspace(0, 1, 20), + rate=50., + reference_frame='starting gate') + +position.add_spatial_series(spatial_series) + +#################### +# or include the object during construction. + +#position = Position(spatial_series=spatial_series) #################### # Each data interface stores its own type of data. We suggest you read the documentation for the @@ -111,7 +203,6 @@ #################### # .. _basic_procmod: -# # Processing modules # ------------------ # @@ -121,10 +212,11 @@ # the common first steps in spike sorting e.g. :py:class:`~pynwb.ecephys.EventDetection`, # :py:class:`~pynwb.ecephys.EventWaveform`, :py:class:`~pynwb.ecephys.FeatureExtraction`. The final results of # the sorting could then be stored in the top-level :py:class:`~pynwb.misc.Units` table (see below). 
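+# As a rough sketch, a downstream user could later pull those intermediate results back
+# out with dictionary-style access (the module and interface names here are only
+# illustrative)::
+#
+#     sorting_module = nwbfile.modules['ecephys']
+#     event_detection = sorting_module.data_interfaces['EventDetection']
+#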
-# -# Processing modules can be created using :py:func:`~pynwb.file.NWBFile.create_processing_module`: +# Derived preprocessed data should go in a processing module, which you can create using +# :py:func:`~pynwb.file.NWBFile.create_processing_module`: -created_mod = nwbfile.create_processing_module('created_mod', 'example module') +behavior_module = nwbfile.create_processing_module('behavior', + description='preprocessed behavioral data') #################### # or by directly calling the constructor and adding to the :py:class:`~pynwb.file.NWBFile` using @@ -132,19 +224,20 @@ from pynwb import ProcessingModule -added_mod = ProcessingModule('added_mod', 'example module') -nwbfile.add_processing_module(added_mod) +ecephys_module = ProcessingModule('ecephys', + description='preprocessed extracellular electrophysiology') +nwbfile.add_processing_module(ecephys_module) #################### -# You can add data to your processing module using the method -# :py:func:`~pynwb.base.ProcessingModule.add_data_interface`. -# Lets make another :py:class:`~pynwb.base.TimeSeries` and then add it to the -# :py:class:`~pynwb.base.ProcessingModule` we just added. +# Best practice is to use the NWB schema module names as processing module names where appropriate. +# These are: 'behavior', 'ecephys', 'icephys', 'ophys', 'ogen', 'retinotopy', and 'misc'. You may also create +# a processing module with a custom name. Once these processing modules are added, access them with -data = list(range(0, 100, 10)) -timestamps = list(range(10)) -mod_ts = TimeSeries('ts_for_mod', data, 'SIunit', timestamps=timestamps) -added_mod.add_data_interface(mod_ts) +nwbfile.modules + +# :py:class:`~pynwb.base.NWBDataInterface` objects can be added to the behavior :ref:`ProcessingModule `. + +nwbfile.modules['behavior'].add_data_interface(position) #################### # .. _basic_epochs: @@ -153,13 +246,13 @@ # ------ # # Epochs can be added to an NWB file using the method :py:func:`~pynwb.file.NWBFile.add_epoch`. -# The first argument is a description of the epoch, the second and third argument are the start time -# and stop time, respectively. The fourth argument is one or more tags for labelling the epoch, -# and the fifth argument is a list of all the :py:class:`~pynwb.base.TimeSeries` that the epoch applies +# The first and second arguments are the start time and stop times, respectively. +# The third argument is one or more tags for labelling the epoch, and the fifth argument is a +# list of all the :py:class:`~pynwb.base.TimeSeries` that the epoch applies # to. -nwbfile.add_epoch(2.0, 4.0, ['first', 'example'], [test_ts, mod_ts]) -nwbfile.add_epoch(6.0, 8.0, ['second', 'example'], [test_ts, mod_ts]) +nwbfile.add_epoch(2.0, 4.0, ['first', 'example'], [test_ts, ]) +nwbfile.add_epoch(6.0, 8.0, ['second', 'example'], [test_ts, ]) #################### # .. _basic_trials: @@ -186,15 +279,20 @@ nwbfile.add_trial(start_time=3.0, stop_time=5.0, stim='ocean') nwbfile.add_trial(start_time=6.0, stop_time=8.0, stim='desert') +#################### +# Tabular data such as trials can be converted to a `pandas.DataFrame`. + +print(nwbfile.trials.to_dataframe()) + #################### # .. _basic_units: # # Units # ------ # -# Unit metadata can be added to an NWB file using the methods :py:func:`~pynwb.file.NWBFile.add_unit` -# and :py:func:`~pynwb.file.NWBFile.add_unit_column`. These methods work like the methods for adding -# trials described :ref:`above ` +# Units are putative cells in your analysis. 
Unit metadata can be added to an NWB file using the methods +# :py:func:`~pynwb.file.NWBFile.add_unit` and :py:func:`~pynwb.file.NWBFile.add_unit_column`. These methods +# work like the methods for adding trials described :ref:`above ` # # A unit is only required to contain a unique integer identifier in the 'id' column # (this will be automatically assigned if not provided). Additional optional values for each unit @@ -226,114 +324,17 @@ obs_intervals=[[1, 10], [20, 30]], location='CA1', quality=0.90) #################### -# .. _units_fields_ref: -# -# .. note:: -# The Units table has some predefined optional columns. Please review the documentation for -# :py:func:`~pynwb.file.NWBFile.add_unit` before adding custom columns. - - -#################### -# .. _basic_writing: -# -# Writing an NWB file -# ------------------- -# -# NWB I/O is carried out using the :py:class:`~pynwb.NWBHDF5IO` class [#]_. This class is responsible -# for mapping an :py:class:`~pynwb.file.NWBFile` object into HDF5 according to the NWB schema. -# -# To write an :py:class:`~pynwb.file.NWBFile`, use the :py:func:`~pynwb.form.backends.io.FORMIO.write` method. - -from pynwb import NWBHDF5IO - -io = NWBHDF5IO('basic_example.nwb', mode='w') -io.write(nwbfile) -io.close() - -#################### -# You can also use :py:func:`~pynwb.NWBHDF5IO` as a context manager: +# Now we overwrite the file with all of the data with NWBHDF5IO('basic_example.nwb', 'w') as io: io.write(nwbfile) #################### -# .. _basic_reading: -# -# Reading an NWB file -# ------------------- -# -# As with writing, reading is also carried out using the :py:class:`~pynwb.NWBHDF5IO` class. -# To read the NWB file we just wrote, using construct another :py:class:`~pynwb.NWBHDF5IO` object, -# and use the :py:func:`~pynwb.form.backends.io.FORMIO.read` method to retrieve an -# :py:class:`~pynwb.file.NWBFile` object. - -io = NWBHDF5IO('basic_example.nwb', 'r') -nwbfile = io.read() - -#################### -# For reading, we cannot use :py:class:`~pynwb.NWBHDF5IO` as a context manager, since the resulting -# :py:class:`~pynwb.NWBHDF5IO` gets closed and deleted when the context completes [#]_. - -#################### -# .. _basic_retrieving_data: -# -# Retrieving data from an NWB file -# -------------------------------- -# -# Most of the methods we used above to write data are paired with a getter method for getting your data back. +# .. _units_fields_ref: # -# Lets start with the :py:class:`~pynwb.base.TimeSeries` object we wrote. Above, we added it as -# acquisition data using the method :py:func:`~pynwb.file.NWBFile.add_acquisition`. We can get it -# back in a couple ways. The first we just mentioned--a simple getter method. In the case of acquisition -# data, the method is :py:func:`~pynwb.file.NWBFile.get_acquisition`. The only argument this method needs -# is the name of the object you are trying to get. We named our :py:class:`~pynwb.base.TimeSeries` -# "test_timeseries": - -ts = nwbfile.get_acquisition('test_timeseries') - -#################### -# If you are not into *getter* methods, you can also retrieve this data by pulling it out of the -# :py:func:`~pynwb.file.NWBFile.acquisition` property. This property supports dict-like indexing. Again, -# all we need to supply is the name of the object we are looking for: - -ts = nwbfile.acquisition['test_timeseries'] - -#################### -# We can also get the :py:class:`~pynwb.ecephys.LFP` object back. 
When we created the :py:class:`~pynwb.ecephys.LFP` -# object, we did not supply a name, so the name defaulted to "LFP" [#]_. - -bts = nwbfile.acquisition['BehavioralTimeSeries'] - -#################### -# Just like acquisition data, we can get processing modules back in the same manner. We created two above. -# Lets read both, but using the two different ways. The first way, -# calling :py:func:`~pynwb.file.NWBFile.get_processing_module`: - -created_mod = nwbfile.get_processing_module('created_mod') - -#################### -# And the second way, indexing into :py:func:`~pynwb.file.NWBFile.modules` - -added_mod = nwbfile.modules['added_mod'] - -#################### -# Now that we have our :py:class:`~pynwb.base.ProcessingModule` back, we can get the :py:class:`~pynwb.base.TimeSeries` -# that we added to it back. Similar to :py:class:`~pynwb.file.NWBFile`, we have two ways of gettings this data back. -# The first is by using the getter :py:func:`~pynwb.base.ProcessingModule.get_data_interface` and passing in -# the name of the object we want back. - -mod_ts = added_mod.get_data_interface('ts_for_mod') - -#################### -# The second way is by indexing directly into the :py:class:`~pynwb.base.ProcessingModule` object and passing -# the name of the object we want back. - -mod_ts = added_mod['ts_for_mod'] - -#################### -# Close the file when we are done with it. - -io.close() +# .. note:: +# The Units table has some predefined optional columns. Please review the documentation for +# :py:func:`~pynwb.file.NWBFile.add_unit` before adding custom columns. #################### # .. _basic_appending: @@ -354,15 +355,17 @@ io = NWBHDF5IO('basic_example.nwb', mode='a') nwbfile = io.read() -bts = nwbfile.acquisition['BehavioralTimeSeries'] +position = nwbfile.modules['behavior'].data_interfaces['Position'] #################### -# Next, add a new :py:class:`~pynwb.base.TimeSeries`. +# Next, add a new :py:class:`~pynwb.base.SpatialSeries`. data = list(range(300, 400, 10)) timestamps = list(range(10)) -test_ts2 = TimeSeries('test_timeseries2', data, 'SIunit', timestamps=timestamps) -bts.add_timeseries(test_ts2) +test_spatial_series = SpatialSeries('test_spatialseries2', data, + reference_frame='starting_gate', + timestamps=timestamps) +position.add_spatial_series(test_spatial_series) #################### # Finally, write the changes back to the file and close it. @@ -377,7 +380,7 @@ # .. [#] HDF5 is currently the only backend supported by NWB. # # .. [#] Neurodata sets can be *very* large, so individual components of the dataset are only loaded into memory when -# you requst them. This functionality is only possible if an open file handle is kept around until users want to +# you request them. This functionality is only possible if an open file handle is kept around until users want to # load data. # # .. [#] Some data interface objects have a default name. This default name is the type of the data interface. 
For From 6a1f6f2287ad7f9f6633afe70d12ddfed5a751a1 Mon Sep 17 00:00:00 2001 From: bendichter Date: Thu, 7 Mar 2019 16:39:18 -0500 Subject: [PATCH 4/6] fix add_docs for NWBFile.add_trial --- src/pynwb/file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pynwb/file.py b/src/pynwb/file.py index d35f3fbfd..5511439e3 100644 --- a/src/pynwb/file.py +++ b/src/pynwb/file.py @@ -522,11 +522,11 @@ def add_trial_column(self, **kwargs): self.__check_trials() call_docval_func(self.trials.add_column, kwargs) - @docval(*get_docval(TimeIntervals.add_row), allow_extra=True) + @docval(*get_docval(TimeIntervals.add_interval), allow_extra=True) def add_trial(self, **kwargs): """ Add a trial to the trial table. - See :py:meth:`~pynwb.core.DynamicTable.add_row` for more details. + See :py:meth:`~pynwb.core.DynamicTable.add_interval` for more details. Required fields are *start_time*, *stop_time*, and any columns that have been added (through calls to `add_trial_columns`). From 2fe8f1d6bdb0bc78fbf44e6ec39c9631cd879634 Mon Sep 17 00:00:00 2001 From: bendichter Date: Thu, 7 Mar 2019 17:24:49 -0500 Subject: [PATCH 5/6] fix doc for add_intervals --- src/pynwb/epoch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pynwb/epoch.py b/src/pynwb/epoch.py index a14588c62..6c3294af9 100644 --- a/src/pynwb/epoch.py +++ b/src/pynwb/epoch.py @@ -38,7 +38,7 @@ def __init__(self, **kwargs): @docval({'name': 'start_time', 'type': float, 'doc': 'Start time of epoch, in seconds'}, {'name': 'stop_time', 'type': float, 'doc': 'Stop time of epoch, in seconds'}, - {'name': 'tags', 'type': (str, list, tuple), 'doc': 'user-defined tags uesd throughout epochs', + {'name': 'tags', 'type': (str, list, tuple), 'doc': 'user-defined tags used throughout time intervals', 'default': None}, {'name': 'timeseries', 'type': (list, tuple, TimeSeries), 'doc': 'the TimeSeries this epoch applies to', 'default': None}, From 06f9fec25bfc5f76f69cc3f7fd80a0cbe17133a7 Mon Sep 17 00:00:00 2001 From: bendichter Date: Thu, 7 Mar 2019 17:35:48 -0500 Subject: [PATCH 6/6] fix up NWB basics docs --- docs/gallery/general/file.py | 82 +++++++++++++++++++++++++++++++----- 1 file changed, 71 insertions(+), 11 deletions(-) diff --git a/docs/gallery/general/file.py b/docs/gallery/general/file.py index d25439839..0be8d424c 100644 --- a/docs/gallery/general/file.py +++ b/docs/gallery/general/file.py @@ -59,7 +59,7 @@ # and :py:func:`~pynwb.file.NWBFile.add_stimulus_template`. Which method you use depends on the source of the # data: use :py:func:`~pynwb.file.NWBFile.add_acquisition` to indicated *acquisition* data, # :py:func:`~pynwb.file.NWBFile.add_stimulus` to indicate *stimulus* data, and -# :py:func:`~pynwb.file.NWBFile.add_stimulus_template` to store stimulus templates [#]_. +# :py:func:`~pynwb.file.NWBFile.add_stimulus_template` to store stimulus templates. nwbfile.add_acquisition(test_ts) @@ -67,6 +67,7 @@ # Access the :py:class:`~pynwb.base.TimeSeries` object `'test_timeseries'` from *acquisition* using nwbfile.acquisition['test_timeseries'] +#################### # or nwbfile.get_acquisition('test_timeseries') @@ -117,27 +118,59 @@ print(test_timeseries_in) #################### -# Accessing the data field, you will notice that it does not return the data values, but instead a `h5py.Dataset`. 
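+# pynwb reads lazily: large fields such as ``data`` come back as ``h5py.Dataset`` objects
+# rather than arrays, so you can pull out just the values you need. A small sketch
+# (``np`` is the ``numpy`` module imported at the top of this tutorial; the variable
+# names are only illustrative)::
+#
+#     subset = test_timeseries_in.data[2:5]          # reads just three values from disk
+#     in_memory = np.array(test_timeseries_in.data)  # copies the whole dataset into memory
+#
+# Printing the :py:class:`~pynwb.base.TimeSeries` object itself shows a summary of its
+# fields: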
+# :: +# +# test_timeseries +# Fields: +# comments: no comments +# conversion: 1.0 +# data: +# description: no description +# interval: 1 +# num_samples: 10 +# resolution: 0.0 +# timestamps: +# timestamps_unit: Seconds +# unit: SIunit + +#################### +# Accessing the data field, you will notice that it does not return the data values, but instead an HDF5 dataset. print(test_timeseries_in.data) #################### +# :: +# +# +# # This object lets you only read in a section of the dataset without reading the entire thing. print(test_timeseries_in.data[:2]) #################### +# :: +# +# [100 110] +# # To load the entire dataset, use `[:]`. print(test_timeseries_in.data[:]) io.close() #################### +# :: +# +# [100 110 120 130 140 150 160 170 180 190] +# # If you use :py:class:`~pynwb.NWBHDF5IO` as a context manager during read, be aware that the # :py:class:`~pynwb.NWBHDF5IO` gets closed and when the context completes and the data will not be -# available outside of the context manager[#]_. +# available outside of the context manager [#]_. #################### +# Adding More Data +# ------------ +# The following illustrates basic data organizational structures that are used throughout NWB:N. +# # .. _reuse_timestamps: # # Reusing timestamps @@ -165,14 +198,15 @@ # :py:class:`~pynwb.base.TimeSeries` that represents the spatial position of an animal over time. By putting # your position data into a :py:class:`~pynwb.behavior.Position` container, downstream users and # tools know where to look to retrieve position data. For a comprehensive list of available data interfaces, see the -# :ref:`overview page ` +# :ref:`overview page `. Here is how tov create a :py:class:`~pynwb.behavior.Position` object +# named '`Position'` [#]_. from pynwb.behavior import Position position = Position() #################### -# You can add objects to a data interface as a method of the data interface, +# You can add objects to a data interface as a method of the data interface: position.create_spatial_series(name='position1', data=np.linspace(0, 1, 20), @@ -180,7 +214,7 @@ reference_frame='starting gate') #################### -# or you can add pre-existing objects, +# or you can add pre-existing objects: from pynwb.behavior import SpatialSeries @@ -192,9 +226,14 @@ position.add_spatial_series(spatial_series) #################### -# or include the object during construction. +# or include the object during construction: -#position = Position(spatial_series=spatial_series) +spatial_series = SpatialSeries(name='position2', + data=np.linspace(0, 1, 20), + rate=50., + reference_frame='starting gate') + +position = Position(spatial_series=spatial_series) #################### # Each data interface stores its own type of data. We suggest you read the documentation for the @@ -235,6 +274,20 @@ nwbfile.modules +#################### +# which returns a `dict`: +# :: +# +# {'behavior': +# behavior +# Fields: +# data_interfaces: { Position } +# description: preprocessed behavioral data, 'ecephys': +# ecephys +# Fields: +# data_interfaces: { } +# description: preprocessed extracellular electrophysiology} +# # :py:class:`~pynwb.base.NWBDataInterface` objects can be added to the behavior :ref:`ProcessingModule `. nwbfile.modules['behavior'].add_data_interface(position) @@ -284,6 +337,16 @@ print(nwbfile.trials.to_dataframe()) +#################### +# :: +# +# start_time stop_time stim +# id +# 0 0.0 2.0 person +# 1 3.0 5.0 ocean +# 2 6.0 8.0 desert +# + #################### # .. 
_basic_units: # @@ -374,9 +437,6 @@ io.close() #################### -# .. [#] Stimulus template data may change in the near future. The NWB team will work with interested parties -# at the `4th NWB Hackathon `_ to refine the schema for storing stimulus template data. -# # .. [#] HDF5 is currently the only backend supported by NWB. # # .. [#] Neurodata sets can be *very* large, so individual components of the dataset are only loaded into memory when