diff --git a/+types/+core/AbstractFeatureSeries.m b/+types/+core/AbstractFeatureSeries.m new file mode 100644 index 00000000..27e6aaf3 --- /dev/null +++ b/+types/+core/AbstractFeatureSeries.m @@ -0,0 +1,141 @@ +classdef AbstractFeatureSeries < types.core.TimeSeries & types.untyped.GroupClass +% ABSTRACTFEATURESERIES Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. + + +% REQUIRED PROPERTIES +properties + features; % REQUIRED (char) Description of the features represented in TimeSeries::data. +end +% OPTIONAL PROPERTIES +properties + feature_units; % (char) Units of each feature. 
+end + +methods + function obj = AbstractFeatureSeries(varargin) + % ABSTRACTFEATURESERIES Constructor for AbstractFeatureSeries + varargin = [{'data_unit' 'see `feature_units`'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'feature_units',[]); + addParameter(p, 'features',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.feature_units = p.Results.feature_units; + obj.features = p.Results.features; + if strcmp(class(obj), 'types.core.AbstractFeatureSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.feature_units(obj, val) + obj.feature_units = obj.validate_feature_units(val); + end + function set.features(obj, val) + obj.features = obj.validate_features(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_feature_units(obj, 
val) + val = types.util.checkDtype('feature_units', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_features(obj, val) + val = types.util.checkDtype('features', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.feature_units) + if startsWith(class(obj.feature_units), 'types.untyped.') + refs = obj.feature_units.export(fid, [fullpath '/feature_units'], refs); + elseif ~isempty(obj.feature_units) + io.writeDataset(fid, [fullpath '/feature_units'], obj.feature_units, 'forceArray'); + end + end + if startsWith(class(obj.features), 'types.untyped.') + refs = obj.features.export(fid, [fullpath '/features'], refs); + elseif ~isempty(obj.features) + io.writeDataset(fid, [fullpath '/features'], obj.features, 'forceArray'); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/AnnotationSeries.m b/+types/+core/AnnotationSeries.m new file mode 100644 index 00000000..0161877e --- /dev/null +++ b/+types/+core/AnnotationSeries.m @@ -0,0 +1,60 @@ +classdef AnnotationSeries < types.core.TimeSeries & types.untyped.GroupClass +% ANNOTATIONSERIES Stores user annotations made during an experiment. 
The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. + + + +methods + function obj = AnnotationSeries(varargin) + % ANNOTATIONSERIES Constructor for AnnotationSeries + varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.AnnotationSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/BehavioralEpochs.m b/+types/+core/BehavioralEpochs.m new file mode 100644 index 00000000..c697b625 --- /dev/null +++ b/+types/+core/BehavioralEpochs.m @@ -0,0 +1,50 @@ +classdef BehavioralEpochs < 
types.core.NWBDataInterface & types.untyped.GroupClass +% BEHAVIORALEPOCHS TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data. + + +% OPTIONAL PROPERTIES +properties + intervalseries; % (IntervalSeries) IntervalSeries object containing start and stop times of epochs. 
+end + +methods + function obj = BehavioralEpochs(varargin) + % BEHAVIORALEPOCHS Constructor for BehavioralEpochs + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.intervalseries, ivarargin] = types.util.parseConstrained(obj,'intervalseries', 'types.core.IntervalSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.BehavioralEpochs') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.intervalseries(obj, val) + obj.intervalseries = obj.validate_intervalseries(val); + end + %% VALIDATORS + + function val = validate_intervalseries(obj, val) + namedprops = struct(); + constrained = {'types.core.IntervalSeries'}; + types.util.checkSet('intervalseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.intervalseries) + refs = obj.intervalseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/BehavioralEvents.m b/+types/+core/BehavioralEvents.m new file mode 100644 index 00000000..a5d94e28 --- /dev/null +++ b/+types/+core/BehavioralEvents.m @@ -0,0 +1,50 @@ +classdef BehavioralEvents < types.core.NWBDataInterface & types.untyped.GroupClass +% BEHAVIORALEVENTS TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details. + + +% OPTIONAL PROPERTIES +properties + timeseries; % (TimeSeries) TimeSeries object containing behavioral events. 
+end + +methods + function obj = BehavioralEvents(varargin) + % BEHAVIORALEVENTS Constructor for BehavioralEvents + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.BehavioralEvents') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.timeseries(obj, val) + obj.timeseries = obj.validate_timeseries(val); + end + %% VALIDATORS + + function val = validate_timeseries(obj, val) + namedprops = struct(); + constrained = {'types.core.TimeSeries'}; + types.util.checkSet('timeseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.timeseries) + refs = obj.timeseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/BehavioralTimeSeries.m b/+types/+core/BehavioralTimeSeries.m new file mode 100644 index 00000000..4f697bae --- /dev/null +++ b/+types/+core/BehavioralTimeSeries.m @@ -0,0 +1,50 @@ +classdef BehavioralTimeSeries < types.core.NWBDataInterface & types.untyped.GroupClass +% BEHAVIORALTIMESERIES TimeSeries for storing Behavoioral time series data. See description of BehavioralEpochs for more details. + + +% OPTIONAL PROPERTIES +properties + timeseries; % (TimeSeries) TimeSeries object containing continuous behavioral data. 
+end + +methods + function obj = BehavioralTimeSeries(varargin) + % BEHAVIORALTIMESERIES Constructor for BehavioralTimeSeries + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.BehavioralTimeSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.timeseries(obj, val) + obj.timeseries = obj.validate_timeseries(val); + end + %% VALIDATORS + + function val = validate_timeseries(obj, val) + namedprops = struct(); + constrained = {'types.core.TimeSeries'}; + types.util.checkSet('timeseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.timeseries) + refs = obj.timeseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ClusterWaveforms.m b/+types/+core/ClusterWaveforms.m new file mode 100644 index 00000000..be7285f1 --- /dev/null +++ b/+types/+core/ClusterWaveforms.m @@ -0,0 +1,137 @@ +classdef ClusterWaveforms < types.core.NWBDataInterface & types.untyped.GroupClass +% CLUSTERWAVEFORMS DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. 
For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. + + +% REQUIRED PROPERTIES +properties + waveform_filtering; % REQUIRED (char) Filtering applied to data before generating mean/sd + waveform_mean; % REQUIRED (single) The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled) + waveform_sd; % REQUIRED (single) Stdev of waveforms for each cluster, using the same indices as in mean +end +% OPTIONAL PROPERTIES +properties + clustering_interface; % Clustering +end + +methods + function obj = ClusterWaveforms(varargin) + % CLUSTERWAVEFORMS Constructor for ClusterWaveforms + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'clustering_interface',[]); + addParameter(p, 'waveform_filtering',[]); + addParameter(p, 'waveform_mean',[]); + addParameter(p, 'waveform_sd',[]); + misc.parseSkipInvalidName(p, varargin); + obj.clustering_interface = p.Results.clustering_interface; + obj.waveform_filtering = p.Results.waveform_filtering; + obj.waveform_mean = p.Results.waveform_mean; + obj.waveform_sd = p.Results.waveform_sd; + if strcmp(class(obj), 'types.core.ClusterWaveforms') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.clustering_interface(obj, val) + obj.clustering_interface = obj.validate_clustering_interface(val); + end + function set.waveform_filtering(obj, val) + obj.waveform_filtering = obj.validate_waveform_filtering(val); + end + function set.waveform_mean(obj, val) + obj.waveform_mean = obj.validate_waveform_mean(val); + 
end + function set.waveform_sd(obj, val) + obj.waveform_sd = obj.validate_waveform_sd(val); + end + %% VALIDATORS + + function val = validate_clustering_interface(obj, val) + val = types.util.checkDtype('clustering_interface', 'types.core.Clustering', val); + end + function val = validate_waveform_filtering(obj, val) + val = types.util.checkDtype('waveform_filtering', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_waveform_mean(obj, val) + val = types.util.checkDtype('waveform_mean', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_waveform_sd(obj, val) + val = types.util.checkDtype('waveform_sd', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.clustering_interface.export(fid, [fullpath '/clustering_interface'], refs); + if startsWith(class(obj.waveform_filtering), 'types.untyped.') + refs = obj.waveform_filtering.export(fid, [fullpath '/waveform_filtering'], 
refs); + elseif ~isempty(obj.waveform_filtering) + io.writeDataset(fid, [fullpath '/waveform_filtering'], obj.waveform_filtering); + end + if startsWith(class(obj.waveform_mean), 'types.untyped.') + refs = obj.waveform_mean.export(fid, [fullpath '/waveform_mean'], refs); + elseif ~isempty(obj.waveform_mean) + io.writeDataset(fid, [fullpath '/waveform_mean'], obj.waveform_mean, 'forceArray'); + end + if startsWith(class(obj.waveform_sd), 'types.untyped.') + refs = obj.waveform_sd.export(fid, [fullpath '/waveform_sd'], refs); + elseif ~isempty(obj.waveform_sd) + io.writeDataset(fid, [fullpath '/waveform_sd'], obj.waveform_sd, 'forceArray'); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Clustering.m b/+types/+core/Clustering.m new file mode 100644 index 00000000..ab8dc279 --- /dev/null +++ b/+types/+core/Clustering.m @@ -0,0 +1,153 @@ +classdef Clustering < types.core.NWBDataInterface & types.untyped.GroupClass +% CLUSTERING DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting. + + +% REQUIRED PROPERTIES +properties + description; % REQUIRED (char) Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc) + num; % REQUIRED (int32) Cluster number of each event + peak_over_rms; % REQUIRED (single) Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric). + times; % REQUIRED (double) Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module. 
+end + +methods + function obj = Clustering(varargin) + % CLUSTERING Constructor for Clustering + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'num',[]); + addParameter(p, 'peak_over_rms',[]); + addParameter(p, 'times',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.num = p.Results.num; + obj.peak_over_rms = p.Results.peak_over_rms; + obj.times = p.Results.times; + if strcmp(class(obj), 'types.core.Clustering') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.num(obj, val) + obj.num = obj.validate_num(val); + end + function set.peak_over_rms(obj, val) + obj.peak_over_rms = obj.validate_peak_over_rms(val); + end + function set.times(obj, val) + obj.times = obj.validate_times(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_num(obj, val) + val = types.util.checkDtype('num', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + 
function val = validate_peak_over_rms(obj, val) + val = types.util.checkDtype('peak_over_rms', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_times(obj, val) + val = types.util.checkDtype('times', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.description), 'types.untyped.') + refs = obj.description.export(fid, [fullpath '/description'], refs); + elseif ~isempty(obj.description) + io.writeDataset(fid, [fullpath '/description'], obj.description); + end + if startsWith(class(obj.num), 'types.untyped.') + refs = obj.num.export(fid, [fullpath '/num'], refs); + elseif ~isempty(obj.num) + io.writeDataset(fid, [fullpath '/num'], obj.num, 'forceArray'); + end + if startsWith(class(obj.peak_over_rms), 'types.untyped.') + refs = obj.peak_over_rms.export(fid, [fullpath '/peak_over_rms'], refs); + elseif ~isempty(obj.peak_over_rms) + io.writeDataset(fid, [fullpath '/peak_over_rms'], obj.peak_over_rms, 'forceArray'); + end + if startsWith(class(obj.times), 'types.untyped.') + refs = obj.times.export(fid, [fullpath '/times'], refs); + elseif ~isempty(obj.times) + io.writeDataset(fid, [fullpath '/times'], obj.times, 'forceArray'); + end + end +end + +end \ No newline at end 
of file diff --git a/+types/+core/CompassDirection.m b/+types/+core/CompassDirection.m new file mode 100644 index 00000000..fb301492 --- /dev/null +++ b/+types/+core/CompassDirection.m @@ -0,0 +1,50 @@ +classdef CompassDirection < types.core.NWBDataInterface & types.untyped.GroupClass +% COMPASSDIRECTION With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees. + + +% OPTIONAL PROPERTIES +properties + spatialseries; % (SpatialSeries) SpatialSeries object containing direction of gaze travel. +end + +methods + function obj = CompassDirection(varargin) + % COMPASSDIRECTION Constructor for CompassDirection + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.CompassDirection') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.spatialseries(obj, val) + obj.spatialseries = obj.validate_spatialseries(val); + end + %% VALIDATORS + + function val = validate_spatialseries(obj, val) + namedprops = struct(); + constrained = {'types.core.SpatialSeries'}; + types.util.checkSet('spatialseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.spatialseries) + refs 
= obj.spatialseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/CorrectedImageStack.m b/+types/+core/CorrectedImageStack.m new file mode 100644 index 00000000..be8f351d --- /dev/null +++ b/+types/+core/CorrectedImageStack.m @@ -0,0 +1,70 @@ +classdef CorrectedImageStack < types.core.NWBDataInterface & types.untyped.GroupClass +% CORRECTEDIMAGESTACK Results from motion correction of an image stack. + + +% REQUIRED PROPERTIES +properties + corrected; % REQUIRED (ImageSeries) Image stack with frames shifted to the common coordinates. + xy_translation; % REQUIRED (TimeSeries) Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image. +end +% OPTIONAL PROPERTIES +properties + original; % ImageSeries +end + +methods + function obj = CorrectedImageStack(varargin) + % CORRECTEDIMAGESTACK Constructor for CorrectedImageStack + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'corrected',[]); + addParameter(p, 'original',[]); + addParameter(p, 'xy_translation',[]); + misc.parseSkipInvalidName(p, varargin); + obj.corrected = p.Results.corrected; + obj.original = p.Results.original; + obj.xy_translation = p.Results.xy_translation; + if strcmp(class(obj), 'types.core.CorrectedImageStack') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.corrected(obj, val) + obj.corrected = obj.validate_corrected(val); + end + function set.original(obj, val) + obj.original = obj.validate_original(val); + end + function set.xy_translation(obj, val) + obj.xy_translation = obj.validate_xy_translation(val); + end + %% VALIDATORS + + function val = validate_corrected(obj, val) + val = types.util.checkDtype('corrected', 
'types.core.ImageSeries', val); + end + function val = validate_original(obj, val) + val = types.util.checkDtype('original', 'types.core.ImageSeries', val); + end + function val = validate_xy_translation(obj, val) + val = types.util.checkDtype('xy_translation', 'types.core.TimeSeries', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.corrected.export(fid, [fullpath '/corrected'], refs); + refs = obj.original.export(fid, [fullpath '/original'], refs); + refs = obj.xy_translation.export(fid, [fullpath '/xy_translation'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/CurrentClampSeries.m b/+types/+core/CurrentClampSeries.m new file mode 100644 index 00000000..103edcc1 --- /dev/null +++ b/+types/+core/CurrentClampSeries.m @@ -0,0 +1,138 @@ +classdef CurrentClampSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% CURRENTCLAMPSERIES Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. + + +% OPTIONAL PROPERTIES +properties + bias_current; % (single) Bias current, in amps. + bridge_balance; % (single) Bridge balance, in ohms. + capacitance_compensation; % (single) Capacitance compensation, in farads. 
+end + +methods + function obj = CurrentClampSeries(varargin) + % CURRENTCLAMPSERIES Constructor for CurrentClampSeries + varargin = [{'data_unit' 'volts'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'bias_current',[]); + addParameter(p, 'bridge_balance',[]); + addParameter(p, 'capacitance_compensation',[]); + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.bias_current = p.Results.bias_current; + obj.bridge_balance = p.Results.bridge_balance; + obj.capacitance_compensation = p.Results.capacitance_compensation; + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.CurrentClampSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.bias_current(obj, val) + obj.bias_current = obj.validate_bias_current(val); + end + function set.bridge_balance(obj, val) + obj.bridge_balance = obj.validate_bridge_balance(val); + end + function set.capacitance_compensation(obj, val) + obj.capacitance_compensation = obj.validate_capacitance_compensation(val); + end + %% VALIDATORS + + function val = validate_bias_current(obj, val) + val = types.util.checkDtype('bias_current', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_bridge_balance(obj, val) + val = types.util.checkDtype('bridge_balance', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + 
valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_capacitance_compensation(obj, val) + val = types.util.checkDtype('capacitance_compensation', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data(obj, val) + + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.bias_current) + if startsWith(class(obj.bias_current), 'types.untyped.') + refs = obj.bias_current.export(fid, [fullpath '/bias_current'], refs); + elseif ~isempty(obj.bias_current) + io.writeDataset(fid, [fullpath '/bias_current'], obj.bias_current); + end + end + if ~isempty(obj.bridge_balance) + if startsWith(class(obj.bridge_balance), 'types.untyped.') + refs = obj.bridge_balance.export(fid, [fullpath '/bridge_balance'], refs); + elseif ~isempty(obj.bridge_balance) + io.writeDataset(fid, [fullpath '/bridge_balance'], obj.bridge_balance); + end + end + if ~isempty(obj.capacitance_compensation) + if startsWith(class(obj.capacitance_compensation), 'types.untyped.') + refs = obj.capacitance_compensation.export(fid, [fullpath '/capacitance_compensation'], refs); + elseif ~isempty(obj.capacitance_compensation) + io.writeDataset(fid, [fullpath '/capacitance_compensation'], obj.capacitance_compensation); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/CurrentClampStimulusSeries.m 
b/+types/+core/CurrentClampStimulusSeries.m new file mode 100644 index 00000000..2683d405 --- /dev/null +++ b/+types/+core/CurrentClampStimulusSeries.m @@ -0,0 +1,43 @@ +classdef CurrentClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% CURRENTCLAMPSTIMULUSSERIES Stimulus current applied during current clamp recording. + + + +methods + function obj = CurrentClampStimulusSeries(varargin) + % CURRENTCLAMPSTIMULUSSERIES Constructor for CurrentClampStimulusSeries + varargin = [{'data_unit' 'amperes'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.CurrentClampStimulusSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/DecompositionSeries.m b/+types/+core/DecompositionSeries.m new file mode 100644 index 00000000..118c7e5c --- /dev/null +++ b/+types/+core/DecompositionSeries.m @@ -0,0 +1,144 @@ +classdef DecompositionSeries < types.core.TimeSeries & types.untyped.GroupClass +% DECOMPOSITIONSERIES Spectral analysis of a time series, e.g. of an LFP or a speech signal. + + +% REQUIRED PROPERTIES +properties + bands; % REQUIRED (DynamicTable) Table for describing the bands that this series was generated from. There should be one row in this table for each band. 
+ metric; % REQUIRED (char) The metric used, e.g. phase, amplitude, power. +end +% OPTIONAL PROPERTIES +properties + source_channels; % (DynamicTableRegion) DynamicTableRegion pointer to the channels that this decomposition series was generated from. + source_timeseries; % TimeSeries +end + +methods + function obj = DecompositionSeries(varargin) + % DECOMPOSITIONSERIES Constructor for DecompositionSeries + varargin = [{'data_unit' 'no unit'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'bands',[]); + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'metric',[]); + addParameter(p, 'source_channels',[]); + addParameter(p, 'source_timeseries',[]); + misc.parseSkipInvalidName(p, varargin); + obj.bands = p.Results.bands; + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.metric = p.Results.metric; + obj.source_channels = p.Results.source_channels; + obj.source_timeseries = p.Results.source_timeseries; + if strcmp(class(obj), 'types.core.DecompositionSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.bands(obj, val) + obj.bands = obj.validate_bands(val); + end + function set.metric(obj, val) + obj.metric = obj.validate_metric(val); + end + function set.source_channels(obj, val) + obj.source_channels = obj.validate_source_channels(val); + end + function set.source_timeseries(obj, val) + obj.source_timeseries = obj.validate_source_timeseries(val); + end + %% VALIDATORS + + function val = validate_bands(obj, val) + val = types.util.checkDtype('bands', 'types.hdmf_common.DynamicTable', val); + end + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz 
= [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_metric(obj, val) + val = types.util.checkDtype('metric', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_source_channels(obj, val) + val = types.util.checkDtype('source_channels', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_source_timeseries(obj, val) + val = types.util.checkDtype('source_timeseries', 'types.core.TimeSeries', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.bands.export(fid, [fullpath '/bands'], refs); + if startsWith(class(obj.metric), 'types.untyped.') + refs = obj.metric.export(fid, [fullpath '/metric'], refs); + elseif ~isempty(obj.metric) + io.writeDataset(fid, [fullpath '/metric'], obj.metric); + end + if ~isempty(obj.source_channels) + refs = obj.source_channels.export(fid, [fullpath '/source_channels'], refs); + end + if 
~isempty(obj.source_timeseries) + refs = obj.source_timeseries.export(fid, [fullpath '/source_timeseries'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Device.m b/+types/+core/Device.m new file mode 100644 index 00000000..a626863b --- /dev/null +++ b/+types/+core/Device.m @@ -0,0 +1,91 @@ +classdef Device < types.core.NWBContainer & types.untyped.GroupClass +% DEVICE Metadata about a data acquisition device, e.g., recording system, electrode, microscope. + + +% OPTIONAL PROPERTIES +properties + description; % (char) Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. + manufacturer; % (char) The name of the manufacturer of the device. +end + +methods + function obj = Device(varargin) + % DEVICE Constructor for Device + obj = obj@types.core.NWBContainer(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'manufacturer',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.manufacturer = p.Results.manufacturer; + if strcmp(class(obj), 'types.core.Device') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.manufacturer(obj, val) + obj.manufacturer = obj.validate_manufacturer(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + 
types.util.checkDims(valsz, validshapes); + end + function val = validate_manufacturer(obj, val) + val = types.util.checkDtype('manufacturer', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.description) + io.writeAttribute(fid, [fullpath '/description'], obj.description); + end + if ~isempty(obj.manufacturer) + io.writeAttribute(fid, [fullpath '/manufacturer'], obj.manufacturer); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/DfOverF.m b/+types/+core/DfOverF.m new file mode 100644 index 00000000..50748605 --- /dev/null +++ b/+types/+core/DfOverF.m @@ -0,0 +1,48 @@ +classdef DfOverF < types.core.NWBDataInterface & types.untyped.GroupClass +% DFOVERF dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). + + +% REQUIRED PROPERTIES +properties + roiresponseseries; % REQUIRED (RoiResponseSeries) RoiResponseSeries object(s) containing dF/F for a ROI. 
+end + +methods + function obj = DfOverF(varargin) + % DFOVERF Constructor for DfOverF + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.roiresponseseries, ivarargin] = types.util.parseConstrained(obj,'roiresponseseries', 'types.core.RoiResponseSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.DfOverF') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.roiresponseseries(obj, val) + obj.roiresponseseries = obj.validate_roiresponseseries(val); + end + %% VALIDATORS + + function val = validate_roiresponseseries(obj, val) + namedprops = struct(); + constrained = {'types.core.RoiResponseSeries'}; + types.util.checkSet('roiresponseseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.roiresponseseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ElectricalSeries.m b/+types/+core/ElectricalSeries.m new file mode 100644 index 00000000..523459f0 --- /dev/null +++ b/+types/+core/ElectricalSeries.m @@ -0,0 +1,140 @@ +classdef ElectricalSeries < types.core.TimeSeries & types.untyped.GroupClass +% ELECTRICALSERIES A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels. 
+ + +% READONLY PROPERTIES +properties(SetAccess = protected) + channel_conversion_axis; % (int32) The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1. +end +% REQUIRED PROPERTIES +properties + electrodes; % REQUIRED (DynamicTableRegion) DynamicTableRegion pointer to the electrodes that this time series was generated from. +end +% OPTIONAL PROPERTIES +properties + channel_conversion; % (single) Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels. + filtering; % (char) Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 Hz". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be "Low-pass filter at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. 
+end + +methods + function obj = ElectricalSeries(varargin) + % ELECTRICALSERIES Constructor for ElectricalSeries + varargin = [{'channel_conversion_axis' types.util.correctType(1, 'int32') 'data_unit' 'volts'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'channel_conversion',[]); + addParameter(p, 'channel_conversion_axis',[]); + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'electrodes',[]); + addParameter(p, 'filtering',[]); + misc.parseSkipInvalidName(p, varargin); + obj.channel_conversion = p.Results.channel_conversion; + obj.channel_conversion_axis = p.Results.channel_conversion_axis; + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.electrodes = p.Results.electrodes; + obj.filtering = p.Results.filtering; + if strcmp(class(obj), 'types.core.ElectricalSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.channel_conversion(obj, val) + obj.channel_conversion = obj.validate_channel_conversion(val); + end + function set.electrodes(obj, val) + obj.electrodes = obj.validate_electrodes(val); + end + function set.filtering(obj, val) + obj.filtering = obj.validate_filtering(val); + end + %% VALIDATORS + + function val = validate_channel_conversion(obj, val) + val = types.util.checkDtype('channel_conversion', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 
'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf,Inf], [Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_electrodes(obj, val) + val = types.util.checkDtype('electrodes', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_filtering(obj, val) + val = types.util.checkDtype('filtering', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.channel_conversion) + if startsWith(class(obj.channel_conversion), 'types.untyped.') + refs = obj.channel_conversion.export(fid, [fullpath '/channel_conversion'], refs); + elseif ~isempty(obj.channel_conversion) + io.writeDataset(fid, [fullpath '/channel_conversion'], obj.channel_conversion, 'forceArray'); + end + end + if ~isempty(obj.channel_conversion) && ~isa(obj.channel_conversion, 'types.untyped.SoftLink') && ~isa(obj.channel_conversion, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/channel_conversion/axis'], obj.channel_conversion_axis); + end + refs = obj.electrodes.export(fid, [fullpath '/electrodes'], refs); + if ~isempty(obj.filtering) + io.writeAttribute(fid, [fullpath '/filtering'], obj.filtering); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ElectrodeGroup.m b/+types/+core/ElectrodeGroup.m new file mode 100644 index 
00000000..174c0e6d --- /dev/null +++ b/+types/+core/ElectrodeGroup.m @@ -0,0 +1,138 @@ +classdef ElectrodeGroup < types.core.NWBContainer & types.untyped.GroupClass +% ELECTRODEGROUP A physical grouping of electrodes, e.g. a shank of an array. + + +% OPTIONAL PROPERTIES +properties + description; % (char) Description of this electrode group. + device; % Device + location; % (char) Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. + position; % (Table with columns: (x = single, y = single, z = single)) stereotaxic or common framework coordinates +end + +methods + function obj = ElectrodeGroup(varargin) + % ELECTRODEGROUP Constructor for ElectrodeGroup + obj = obj@types.core.NWBContainer(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'device',[]); + addParameter(p, 'location',[]); + addParameter(p, 'position',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.device = p.Results.device; + obj.location = p.Results.location; + obj.position = p.Results.position; + if strcmp(class(obj), 'types.core.ElectrodeGroup') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.device(obj, val) + obj.device = obj.validate_device(val); + end + function set.location(obj, val) + obj.location = obj.validate_location(val); + end + function set.position(obj, val) + obj.position = obj.validate_position(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz 
= [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_device(obj, val) + val = types.util.checkDtype('device', 'types.core.Device', val); + end + function val = validate_location(obj, val) + val = types.util.checkDtype('location', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_position(obj, val) + if isempty(val) || isa(val, 'types.untyped.DataStub') + return; + end + if ~istable(val) && ~isstruct(val) && ~isa(val, 'containers.Map') + error('Property `position` must be a table,struct, or containers.Map.'); + end + vprops = struct(); + vprops.x = 'single'; + vprops.y = 'single'; + vprops.z = 'single'; + val = types.util.checkDtype('position', vprops, val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/description'], obj.description); + refs = obj.device.export(fid, [fullpath '/device'], refs); + io.writeAttribute(fid, [fullpath '/location'], obj.location); + if ~isempty(obj.position) + if startsWith(class(obj.position), 'types.untyped.') + refs = 
obj.position.export(fid, [fullpath '/position'], refs); + elseif ~isempty(obj.position) + io.writeCompound(fid, [fullpath '/position'], obj.position); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/EventDetection.m b/+types/+core/EventDetection.m new file mode 100644 index 00000000..6a7cfe46 --- /dev/null +++ b/+types/+core/EventDetection.m @@ -0,0 +1,147 @@ +classdef EventDetection < types.core.NWBDataInterface & types.untyped.GroupClass +% EVENTDETECTION Detected spike events from voltage trace(s). + + +% READONLY PROPERTIES +properties(SetAccess = protected) + times_unit; % (char) Unit of measurement for event times, which is fixed to 'seconds'. +end +% REQUIRED PROPERTIES +properties + detection_method; % REQUIRED (char) Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values. + source_idx; % REQUIRED (int32) Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data. + times; % REQUIRED (double) Timestamps of events, in seconds. 
+end +% OPTIONAL PROPERTIES +properties + source_electricalseries; % ElectricalSeries +end + +methods + function obj = EventDetection(varargin) + % EVENTDETECTION Constructor for EventDetection + varargin = [{'times_unit' 'seconds'} varargin]; + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'detection_method',[]); + addParameter(p, 'source_electricalseries',[]); + addParameter(p, 'source_idx',[]); + addParameter(p, 'times',[]); + addParameter(p, 'times_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.detection_method = p.Results.detection_method; + obj.source_electricalseries = p.Results.source_electricalseries; + obj.source_idx = p.Results.source_idx; + obj.times = p.Results.times; + obj.times_unit = p.Results.times_unit; + if strcmp(class(obj), 'types.core.EventDetection') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.detection_method(obj, val) + obj.detection_method = obj.validate_detection_method(val); + end + function set.source_electricalseries(obj, val) + obj.source_electricalseries = obj.validate_source_electricalseries(val); + end + function set.source_idx(obj, val) + obj.source_idx = obj.validate_source_idx(val); + end + function set.times(obj, val) + obj.times = obj.validate_times(val); + end + %% VALIDATORS + + function val = validate_detection_method(obj, val) + val = types.util.checkDtype('detection_method', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = 
validate_source_electricalseries(obj, val) + val = types.util.checkDtype('source_electricalseries', 'types.core.ElectricalSeries', val); + end + function val = validate_source_idx(obj, val) + val = types.util.checkDtype('source_idx', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_times(obj, val) + val = types.util.checkDtype('times', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.detection_method), 'types.untyped.') + refs = obj.detection_method.export(fid, [fullpath '/detection_method'], refs); + elseif ~isempty(obj.detection_method) + io.writeDataset(fid, [fullpath '/detection_method'], obj.detection_method); + end + refs = obj.source_electricalseries.export(fid, [fullpath '/source_electricalseries'], refs); + if startsWith(class(obj.source_idx), 'types.untyped.') + refs = obj.source_idx.export(fid, [fullpath '/source_idx'], refs); + elseif ~isempty(obj.source_idx) + io.writeDataset(fid, [fullpath '/source_idx'], obj.source_idx, 'forceArray'); + end + if startsWith(class(obj.times), 'types.untyped.') + refs = obj.times.export(fid, [fullpath '/times'], refs); + elseif ~isempty(obj.times) + io.writeDataset(fid, [fullpath '/times'], obj.times, 'forceArray'); + 
end + if ~isempty(obj.times) && ~isa(obj.times, 'types.untyped.SoftLink') && ~isa(obj.times, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/times/unit'], obj.times_unit); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/EventWaveform.m b/+types/+core/EventWaveform.m new file mode 100644 index 00000000..375c758c --- /dev/null +++ b/+types/+core/EventWaveform.m @@ -0,0 +1,50 @@ +classdef EventWaveform < types.core.NWBDataInterface & types.untyped.GroupClass +% EVENTWAVEFORM Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. + + +% OPTIONAL PROPERTIES +properties + spikeeventseries; % (SpikeEventSeries) SpikeEventSeries object(s) containing detected spike event waveforms. +end + +methods + function obj = EventWaveform(varargin) + % EVENTWAVEFORM Constructor for EventWaveform + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.spikeeventseries, ivarargin] = types.util.parseConstrained(obj,'spikeeventseries', 'types.core.SpikeEventSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.EventWaveform') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.spikeeventseries(obj, val) + obj.spikeeventseries = obj.validate_spikeeventseries(val); + end + %% VALIDATORS + + function val = validate_spikeeventseries(obj, val) + namedprops = struct(); + constrained = {'types.core.SpikeEventSeries'}; + types.util.checkSet('spikeeventseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if 
any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.spikeeventseries) + refs = obj.spikeeventseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ExperimentalConditionsTable.m b/+types/+core/ExperimentalConditionsTable.m new file mode 100644 index 00000000..5dbc082f --- /dev/null +++ b/+types/+core/ExperimentalConditionsTable.m @@ -0,0 +1,60 @@ +classdef ExperimentalConditionsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% EXPERIMENTALCONDITIONSTABLE A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. + + +% REQUIRED PROPERTIES +properties + repetitions; % REQUIRED (DynamicTableRegion) A reference to one or more rows in the RepetitionsTable table. + repetitions_index; % REQUIRED (VectorIndex) Index dataset for the repetitions column. +end + +methods + function obj = ExperimentalConditionsTable(varargin) + % EXPERIMENTALCONDITIONSTABLE Constructor for ExperimentalConditionsTable + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'repetitions',[]); + addParameter(p, 'repetitions_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.repetitions = p.Results.repetitions; + obj.repetitions_index = p.Results.repetitions_index; + if strcmp(class(obj), 'types.core.ExperimentalConditionsTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.ExperimentalConditionsTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.repetitions(obj, val) + obj.repetitions = obj.validate_repetitions(val); + end + function set.repetitions_index(obj, val) + obj.repetitions_index = obj.validate_repetitions_index(val); + end + %% VALIDATORS 
+ + function val = validate_repetitions(obj, val) + val = types.util.checkDtype('repetitions', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_repetitions_index(obj, val) + val = types.util.checkDtype('repetitions_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.repetitions.export(fid, [fullpath '/repetitions'], refs); + refs = obj.repetitions_index.export(fid, [fullpath '/repetitions_index'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/EyeTracking.m b/+types/+core/EyeTracking.m new file mode 100644 index 00000000..b27553a5 --- /dev/null +++ b/+types/+core/EyeTracking.m @@ -0,0 +1,50 @@ +classdef EyeTracking < types.core.NWBDataInterface & types.untyped.GroupClass +% EYETRACKING Eye-tracking data, representing direction of gaze. + + +% OPTIONAL PROPERTIES +properties + spatialseries; % (SpatialSeries) SpatialSeries object containing data measuring direction of gaze. 
+end + +methods + function obj = EyeTracking(varargin) + % EYETRACKING Constructor for EyeTracking + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.EyeTracking') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.spatialseries(obj, val) + obj.spatialseries = obj.validate_spatialseries(val); + end + %% VALIDATORS + + function val = validate_spatialseries(obj, val) + namedprops = struct(); + constrained = {'types.core.SpatialSeries'}; + types.util.checkSet('spatialseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.spatialseries) + refs = obj.spatialseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/FeatureExtraction.m b/+types/+core/FeatureExtraction.m new file mode 100644 index 00000000..cab26ebd --- /dev/null +++ b/+types/+core/FeatureExtraction.m @@ -0,0 +1,134 @@ +classdef FeatureExtraction < types.core.NWBDataInterface & types.untyped.GroupClass +% FEATUREEXTRACTION Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. + + +% REQUIRED PROPERTIES +properties + description; % REQUIRED (char) Description of features (eg, ''PC1'') for each of the extracted features. + electrodes; % REQUIRED (DynamicTableRegion) DynamicTableRegion pointer to the electrodes that this time series was generated from. 
+ features; % REQUIRED (single) Multi-dimensional array of features extracted from each event. + times; % REQUIRED (double) Times of events that features correspond to (can be a link). +end + +methods + function obj = FeatureExtraction(varargin) + % FEATUREEXTRACTION Constructor for FeatureExtraction + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'electrodes',[]); + addParameter(p, 'features',[]); + addParameter(p, 'times',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.electrodes = p.Results.electrodes; + obj.features = p.Results.features; + obj.times = p.Results.times; + if strcmp(class(obj), 'types.core.FeatureExtraction') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.electrodes(obj, val) + obj.electrodes = obj.validate_electrodes(val); + end + function set.features(obj, val) + obj.features = obj.validate_features(val); + end + function set.times(obj, val) + obj.times = obj.validate_times(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_electrodes(obj, val) + val = types.util.checkDtype('electrodes', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_features(obj, val) + val = 
types.util.checkDtype('features', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_times(obj, val) + val = types.util.checkDtype('times', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.description), 'types.untyped.') + refs = obj.description.export(fid, [fullpath '/description'], refs); + elseif ~isempty(obj.description) + io.writeDataset(fid, [fullpath '/description'], obj.description, 'forceArray'); + end + refs = obj.electrodes.export(fid, [fullpath '/electrodes'], refs); + if startsWith(class(obj.features), 'types.untyped.') + refs = obj.features.export(fid, [fullpath '/features'], refs); + elseif ~isempty(obj.features) + io.writeDataset(fid, [fullpath '/features'], obj.features, 'forceArray'); + end + if startsWith(class(obj.times), 'types.untyped.') + refs = obj.times.export(fid, [fullpath '/times'], refs); + elseif ~isempty(obj.times) + io.writeDataset(fid, [fullpath '/times'], obj.times, 'forceArray'); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/FilteredEphys.m b/+types/+core/FilteredEphys.m new file mode 100644 index 00000000..53c81128 --- /dev/null +++ b/+types/+core/FilteredEphys.m @@ -0,0 +1,48 @@ +classdef 
FilteredEphys < types.core.NWBDataInterface & types.untyped.GroupClass +% FILTEREDEPHYS Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + + +% REQUIRED PROPERTIES +properties + electricalseries; % REQUIRED (ElectricalSeries) ElectricalSeries object(s) containing filtered electrophysiology data. 
+end + +methods + function obj = FilteredEphys(varargin) + % FILTEREDEPHYS Constructor for FilteredEphys + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.electricalseries, ivarargin] = types.util.parseConstrained(obj,'electricalseries', 'types.core.ElectricalSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.FilteredEphys') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.electricalseries(obj, val) + obj.electricalseries = obj.validate_electricalseries(val); + end + %% VALIDATORS + + function val = validate_electricalseries(obj, val) + namedprops = struct(); + constrained = {'types.core.ElectricalSeries'}; + types.util.checkSet('electricalseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.electricalseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/Fluorescence.m b/+types/+core/Fluorescence.m new file mode 100644 index 00000000..904cb778 --- /dev/null +++ b/+types/+core/Fluorescence.m @@ -0,0 +1,48 @@ +classdef Fluorescence < types.core.NWBDataInterface & types.untyped.GroupClass +% FLUORESCENCE Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes). + + +% REQUIRED PROPERTIES +properties + roiresponseseries; % REQUIRED (RoiResponseSeries) RoiResponseSeries object(s) containing fluorescence data for a ROI. 
+end + +methods + function obj = Fluorescence(varargin) + % FLUORESCENCE Constructor for Fluorescence + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.roiresponseseries, ivarargin] = types.util.parseConstrained(obj,'roiresponseseries', 'types.core.RoiResponseSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.Fluorescence') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.roiresponseseries(obj, val) + obj.roiresponseseries = obj.validate_roiresponseseries(val); + end + %% VALIDATORS + + function val = validate_roiresponseseries(obj, val) + namedprops = struct(); + constrained = {'types.core.RoiResponseSeries'}; + types.util.checkSet('roiresponseseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.roiresponseseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/GrayscaleImage.m b/+types/+core/GrayscaleImage.m new file mode 100644 index 00000000..0f773147 --- /dev/null +++ b/+types/+core/GrayscaleImage.m @@ -0,0 +1,40 @@ +classdef GrayscaleImage < types.core.Image & types.untyped.DatasetClass +% GRAYSCALEIMAGE A grayscale image. 
+ + + +methods + function obj = GrayscaleImage(varargin) + % GRAYSCALEIMAGE Constructor for GrayscaleImage + obj = obj@types.core.Image(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.GrayscaleImage') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.Image(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IZeroClampSeries.m b/+types/+core/IZeroClampSeries.m new file mode 100644 index 00000000..8d6e32da --- /dev/null +++ b/+types/+core/IZeroClampSeries.m @@ -0,0 +1,98 @@ +classdef IZeroClampSeries < types.core.CurrentClampSeries & types.untyped.GroupClass +% IZEROCLAMPSERIES Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. 
+ + + +methods + function obj = IZeroClampSeries(varargin) + % IZEROCLAMPSERIES Constructor for IZeroClampSeries + varargin = [{'stimulus_description' 'N/A'} varargin]; + obj = obj@types.core.CurrentClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'bias_current',[]); + addParameter(p, 'bridge_balance',[]); + addParameter(p, 'capacitance_compensation',[]); + addParameter(p, 'stimulus_description',[]); + misc.parseSkipInvalidName(p, varargin); + obj.bias_current = p.Results.bias_current; + obj.bridge_balance = p.Results.bridge_balance; + obj.capacitance_compensation = p.Results.capacitance_compensation; + obj.stimulus_description = p.Results.stimulus_description; + if strcmp(class(obj), 'types.core.IZeroClampSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_bias_current(obj, val) + val = types.util.checkDtype('bias_current', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_bridge_balance(obj, val) + val = types.util.checkDtype('bridge_balance', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_capacitance_compensation(obj, val) + val = types.util.checkDtype('capacitance_compensation', 
'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.CurrentClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Image.m b/+types/+core/Image.m new file mode 100644 index 00000000..93a0ab7d --- /dev/null +++ b/+types/+core/Image.m @@ -0,0 +1,96 @@ +classdef Image < types.core.NWBData & types.untyped.DatasetClass +% IMAGE An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). + + +% OPTIONAL PROPERTIES +properties + description; % (char) Description of the image. + resolution; % (single) Pixel resolution of the image, in pixels per centimeter. 
+end + +methods + function obj = Image(varargin) + % IMAGE Constructor for Image + obj = obj@types.core.NWBData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'description',[]); + addParameter(p, 'resolution',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.description = p.Results.description; + obj.resolution = p.Results.resolution; + if strcmp(class(obj), 'types.core.Image') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.resolution(obj, val) + obj.resolution = obj.validate_resolution(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_resolution(obj, val) + val = types.util.checkDtype('resolution', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBData(obj, fid, fullpath, refs); + if 
any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.description) + io.writeAttribute(fid, [fullpath '/description'], obj.description); + end + if ~isempty(obj.resolution) + io.writeAttribute(fid, [fullpath '/resolution'], obj.resolution); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImageMaskSeries.m b/+types/+core/ImageMaskSeries.m new file mode 100644 index 00000000..7514ac8b --- /dev/null +++ b/+types/+core/ImageMaskSeries.m @@ -0,0 +1,47 @@ +classdef ImageMaskSeries < types.core.ImageSeries & types.untyped.GroupClass +% IMAGEMASKSERIES An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. + + +% OPTIONAL PROPERTIES +properties + masked_imageseries; % ImageSeries +end + +methods + function obj = ImageMaskSeries(varargin) + % IMAGEMASKSERIES Constructor for ImageMaskSeries + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'masked_imageseries',[]); + misc.parseSkipInvalidName(p, varargin); + obj.masked_imageseries = p.Results.masked_imageseries; + if strcmp(class(obj), 'types.core.ImageMaskSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.masked_imageseries(obj, val) + obj.masked_imageseries = obj.validate_masked_imageseries(val); + end + %% VALIDATORS + + function val = validate_masked_imageseries(obj, val) + val = types.util.checkDtype('masked_imageseries', 'types.core.ImageSeries', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = 
export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.masked_imageseries.export(fid, [fullpath '/masked_imageseries'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImageReferences.m b/+types/+core/ImageReferences.m new file mode 100644 index 00000000..e5139fbf --- /dev/null +++ b/+types/+core/ImageReferences.m @@ -0,0 +1,41 @@ +classdef ImageReferences < types.core.NWBData & types.untyped.DatasetClass +% IMAGEREFERENCES Ordered dataset of references to Image objects. + + + +methods + function obj = ImageReferences(varargin) + % IMAGEREFERENCES Constructor for ImageReferences + obj = obj@types.core.NWBData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.ImageReferences') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + % Reference to type `Image` + val = types.util.checkDtype('data', 'types.untyped.ObjectView', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImageSegmentation.m b/+types/+core/ImageSegmentation.m new file mode 100644 index 00000000..75d2c312 --- /dev/null +++ b/+types/+core/ImageSegmentation.m @@ -0,0 +1,48 @@ +classdef ImageSegmentation < types.core.NWBDataInterface & types.untyped.GroupClass +% IMAGESEGMENTATION Stores pixels in an image that represent different regions of interest (ROIs) or masks. 
All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. + + +% REQUIRED PROPERTIES +properties + planesegmentation; % REQUIRED (PlaneSegmentation) Results from image segmentation of a specific imaging plane. +end + +methods + function obj = ImageSegmentation(varargin) + % IMAGESEGMENTATION Constructor for ImageSegmentation + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.planesegmentation, ivarargin] = types.util.parseConstrained(obj,'planesegmentation', 'types.core.PlaneSegmentation', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.ImageSegmentation') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.planesegmentation(obj, val) + obj.planesegmentation = obj.validate_planesegmentation(val); + end + %% VALIDATORS + + function val = validate_planesegmentation(obj, val) + namedprops = struct(); + constrained = {'types.core.PlaneSegmentation'}; + types.util.checkSet('planesegmentation', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.planesegmentation.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImageSeries.m b/+types/+core/ImageSeries.m new file 
mode 100644 index 00000000..5f45b8a0 --- /dev/null +++ b/+types/+core/ImageSeries.m @@ -0,0 +1,189 @@ +classdef ImageSeries < types.core.TimeSeries & types.untyped.GroupClass +% IMAGESERIES General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + + +% OPTIONAL PROPERTIES +properties + device; % Device + dimension; % (int32) Number of pixels on x, y, (and z) axes. + external_file; % (char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + external_file_starting_frame; % (int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to faciliate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + format; % (char) Format of image. 
If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. +end + +methods + function obj = ImageSeries(varargin) + % IMAGESERIES Constructor for ImageSeries + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'device',[]); + addParameter(p, 'dimension',[]); + addParameter(p, 'external_file',[]); + addParameter(p, 'external_file_starting_frame',[]); + addParameter(p, 'format',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.device = p.Results.device; + obj.dimension = p.Results.dimension; + obj.external_file = p.Results.external_file; + obj.external_file_starting_frame = p.Results.external_file_starting_frame; + obj.format = p.Results.format; + if strcmp(class(obj), 'types.core.ImageSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.device(obj, val) + obj.device = obj.validate_device(val); + end + function set.dimension(obj, val) + obj.dimension = obj.validate_dimension(val); + end + function set.external_file(obj, val) + obj.external_file = obj.validate_external_file(val); + end + function set.external_file_starting_frame(obj, val) + obj.external_file_starting_frame = obj.validate_external_file_starting_frame(val); + end + function set.format(obj, val) + obj.format = obj.validate_format(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif 
istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf,Inf,Inf], [Inf,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_device(obj, val) + val = types.util.checkDtype('device', 'types.core.Device', val); + end + function val = validate_dimension(obj, val) + val = types.util.checkDtype('dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_external_file(obj, val) + val = types.util.checkDtype('external_file', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_external_file_starting_frame(obj, val) + val = types.util.checkDtype('external_file_starting_frame', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_format(obj, val) + val = types.util.checkDtype('format', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = 
size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.device) + refs = obj.device.export(fid, [fullpath '/device'], refs); + end + if ~isempty(obj.dimension) + if startsWith(class(obj.dimension), 'types.untyped.') + refs = obj.dimension.export(fid, [fullpath '/dimension'], refs); + elseif ~isempty(obj.dimension) + io.writeDataset(fid, [fullpath '/dimension'], obj.dimension, 'forceArray'); + end + end + if ~isempty(obj.external_file) + if startsWith(class(obj.external_file), 'types.untyped.') + refs = obj.external_file.export(fid, [fullpath '/external_file'], refs); + elseif ~isempty(obj.external_file) + io.writeDataset(fid, [fullpath '/external_file'], obj.external_file, 'forceArray'); + end + end + if ~isempty(obj.external_file) && ~isa(obj.external_file, 'types.untyped.SoftLink') && ~isa(obj.external_file, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/external_file/starting_frame'], obj.external_file_starting_frame, 'forceArray'); + end + if ~isempty(obj.format) + if startsWith(class(obj.format), 'types.untyped.') + refs = obj.format.export(fid, [fullpath '/format'], refs); + elseif ~isempty(obj.format) + io.writeDataset(fid, [fullpath '/format'], obj.format); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Images.m b/+types/+core/Images.m new file mode 100644 index 00000000..b07572ad --- /dev/null +++ b/+types/+core/Images.m @@ -0,0 +1,87 @@ +classdef Images < types.core.NWBDataInterface & types.untyped.GroupClass +% IMAGES A collection of images with an optional way to specify the order of the images using the "order_of_images" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. 
+ + +% REQUIRED PROPERTIES +properties + image; % REQUIRED (Image) Images stored in this collection. +end +% OPTIONAL PROPERTIES +properties + description; % (char) Description of this collection of images. + order_of_images; % (ImageReferences) Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. +end + +methods + function obj = Images(varargin) + % IMAGES Constructor for Images + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.image, ivarargin] = types.util.parseConstrained(obj,'image', 'types.core.Image', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'order_of_images',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.order_of_images = p.Results.order_of_images; + if strcmp(class(obj), 'types.core.Images') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.image(obj, val) + obj.image = obj.validate_image(val); + end + function set.order_of_images(obj, val) + obj.order_of_images = obj.validate_order_of_images(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = 
validate_image(obj, val) + constrained = { 'types.core.Image' }; + types.util.checkSet('image', struct(), constrained, val); + end + function val = validate_order_of_images(obj, val) + val = types.util.checkDtype('order_of_images', 'types.core.ImageReferences', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/description'], obj.description); + refs = obj.image.export(fid, fullpath, refs); + if ~isempty(obj.order_of_images) + refs = obj.order_of_images.export(fid, [fullpath '/order_of_images'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImagingPlane.m b/+types/+core/ImagingPlane.m new file mode 100644 index 00000000..ddfdcb59 --- /dev/null +++ b/+types/+core/ImagingPlane.m @@ -0,0 +1,443 @@ +classdef ImagingPlane < types.core.NWBContainer & types.untyped.GroupClass +% IMAGINGPLANE An imaging plane and its metadata. + + +% REQUIRED PROPERTIES +properties + excitation_lambda; % REQUIRED (single) Excitation wavelength, in nm. + indicator; % REQUIRED (char) Calcium indicator. + location; % REQUIRED (char) Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + opticalchannel; % REQUIRED (OpticalChannel) An optical channel used to record from an imaging plane. +end +% OPTIONAL PROPERTIES +properties + description; % (char) Description of the imaging plane. + device; % Device + grid_spacing; % (single) Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + grid_spacing_unit; % (char) Measurement units for grid_spacing. The default value is 'meters'. 
+ imaging_rate; % (single) Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. + manifold; % (single) DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + manifold_conversion; % (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + manifold_unit; % (char) Base unit of measurement for working with the data. The default value is 'meters'. + origin_coords; % (single) Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + origin_coords_unit; % (char) Measurement units for origin_coords. The default value is 'meters'. + reference_frame; % (char) Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. 
For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." +end + +methods + function obj = ImagingPlane(varargin) + % IMAGINGPLANE Constructor for ImagingPlane + varargin = [{'grid_spacing_unit' 'meters' 'manifold_conversion' types.util.correctType(1, 'single') 'manifold_unit' 'meters' 'origin_coords_unit' 'meters'} varargin]; + obj = obj@types.core.NWBContainer(varargin{:}); + [obj.opticalchannel, ivarargin] = types.util.parseConstrained(obj,'opticalchannel', 'types.core.OpticalChannel', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'device',[]); + addParameter(p, 'excitation_lambda',[]); + addParameter(p, 'grid_spacing',[]); + addParameter(p, 'grid_spacing_unit',[]); + addParameter(p, 'imaging_rate',[]); + addParameter(p, 'indicator',[]); + addParameter(p, 'location',[]); + addParameter(p, 'manifold',[]); + addParameter(p, 'manifold_conversion',[]); + addParameter(p, 'manifold_unit',[]); + addParameter(p, 'origin_coords',[]); + addParameter(p, 'origin_coords_unit',[]); + addParameter(p, 'reference_frame',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + 
obj.device = p.Results.device; + obj.excitation_lambda = p.Results.excitation_lambda; + obj.grid_spacing = p.Results.grid_spacing; + obj.grid_spacing_unit = p.Results.grid_spacing_unit; + obj.imaging_rate = p.Results.imaging_rate; + obj.indicator = p.Results.indicator; + obj.location = p.Results.location; + obj.manifold = p.Results.manifold; + obj.manifold_conversion = p.Results.manifold_conversion; + obj.manifold_unit = p.Results.manifold_unit; + obj.origin_coords = p.Results.origin_coords; + obj.origin_coords_unit = p.Results.origin_coords_unit; + obj.reference_frame = p.Results.reference_frame; + if strcmp(class(obj), 'types.core.ImagingPlane') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.device(obj, val) + obj.device = obj.validate_device(val); + end + function set.excitation_lambda(obj, val) + obj.excitation_lambda = obj.validate_excitation_lambda(val); + end + function set.grid_spacing(obj, val) + obj.grid_spacing = obj.validate_grid_spacing(val); + end + function set.grid_spacing_unit(obj, val) + obj.grid_spacing_unit = obj.validate_grid_spacing_unit(val); + end + function set.imaging_rate(obj, val) + obj.imaging_rate = obj.validate_imaging_rate(val); + end + function set.indicator(obj, val) + obj.indicator = obj.validate_indicator(val); + end + function set.location(obj, val) + obj.location = obj.validate_location(val); + end + function set.manifold(obj, val) + obj.manifold = obj.validate_manifold(val); + end + function set.manifold_conversion(obj, val) + obj.manifold_conversion = obj.validate_manifold_conversion(val); + end + function set.manifold_unit(obj, val) + obj.manifold_unit = obj.validate_manifold_unit(val); + end + function set.opticalchannel(obj, val) + obj.opticalchannel = obj.validate_opticalchannel(val); + end + function 
set.origin_coords(obj, val) + obj.origin_coords = obj.validate_origin_coords(val); + end + function set.origin_coords_unit(obj, val) + obj.origin_coords_unit = obj.validate_origin_coords_unit(val); + end + function set.reference_frame(obj, val) + obj.reference_frame = obj.validate_reference_frame(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_device(obj, val) + val = types.util.checkDtype('device', 'types.core.Device', val); + end + function val = validate_excitation_lambda(obj, val) + val = types.util.checkDtype('excitation_lambda', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_grid_spacing(obj, val) + val = types.util.checkDtype('grid_spacing', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_grid_spacing_unit(obj, val) + val = types.util.checkDtype('grid_spacing_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif 
istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_imaging_rate(obj, val) + val = types.util.checkDtype('imaging_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_indicator(obj, val) + val = types.util.checkDtype('indicator', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_location(obj, val) + val = types.util.checkDtype('location', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_manifold(obj, val) + val = types.util.checkDtype('manifold', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3,Inf,Inf,Inf], [3,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_manifold_conversion(obj, val) + val = 
types.util.checkDtype('manifold_conversion', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_manifold_unit(obj, val) + val = types.util.checkDtype('manifold_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_opticalchannel(obj, val) + namedprops = struct(); + constrained = {'types.core.OpticalChannel'}; + types.util.checkSet('opticalchannel', namedprops, constrained, val); + end + function val = validate_origin_coords(obj, val) + val = types.util.checkDtype('origin_coords', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_origin_coords_unit(obj, val) + val = types.util.checkDtype('origin_coords_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_reference_frame(obj, val) + val = types.util.checkDtype('reference_frame', 
'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.description) + if startsWith(class(obj.description), 'types.untyped.') + refs = obj.description.export(fid, [fullpath '/description'], refs); + elseif ~isempty(obj.description) + io.writeDataset(fid, [fullpath '/description'], obj.description); + end + end + refs = obj.device.export(fid, [fullpath '/device'], refs); + if startsWith(class(obj.excitation_lambda), 'types.untyped.') + refs = obj.excitation_lambda.export(fid, [fullpath '/excitation_lambda'], refs); + elseif ~isempty(obj.excitation_lambda) + io.writeDataset(fid, [fullpath '/excitation_lambda'], obj.excitation_lambda); + end + if ~isempty(obj.grid_spacing) + if startsWith(class(obj.grid_spacing), 'types.untyped.') + refs = obj.grid_spacing.export(fid, [fullpath '/grid_spacing'], refs); + elseif ~isempty(obj.grid_spacing) + io.writeDataset(fid, [fullpath '/grid_spacing'], obj.grid_spacing, 'forceArray'); + end + end + if ~isempty(obj.grid_spacing) && ~isa(obj.grid_spacing, 'types.untyped.SoftLink') && ~isa(obj.grid_spacing, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/grid_spacing/unit'], obj.grid_spacing_unit); + end + if ~isempty(obj.imaging_rate) + if startsWith(class(obj.imaging_rate), 'types.untyped.') + refs = obj.imaging_rate.export(fid, [fullpath '/imaging_rate'], refs); + elseif ~isempty(obj.imaging_rate) + io.writeDataset(fid, [fullpath '/imaging_rate'], obj.imaging_rate); + end + end + if startsWith(class(obj.indicator), 'types.untyped.') + refs = 
obj.indicator.export(fid, [fullpath '/indicator'], refs); + elseif ~isempty(obj.indicator) + io.writeDataset(fid, [fullpath '/indicator'], obj.indicator); + end + if startsWith(class(obj.location), 'types.untyped.') + refs = obj.location.export(fid, [fullpath '/location'], refs); + elseif ~isempty(obj.location) + io.writeDataset(fid, [fullpath '/location'], obj.location); + end + if ~isempty(obj.manifold) + if startsWith(class(obj.manifold), 'types.untyped.') + refs = obj.manifold.export(fid, [fullpath '/manifold'], refs); + elseif ~isempty(obj.manifold) + io.writeDataset(fid, [fullpath '/manifold'], obj.manifold, 'forceArray'); + end + end + if ~isempty(obj.manifold) && ~isa(obj.manifold, 'types.untyped.SoftLink') && ~isa(obj.manifold, 'types.untyped.ExternalLink') && ~isempty(obj.manifold_conversion) + io.writeAttribute(fid, [fullpath '/manifold/conversion'], obj.manifold_conversion); + end + if ~isempty(obj.manifold) && ~isa(obj.manifold, 'types.untyped.SoftLink') && ~isa(obj.manifold, 'types.untyped.ExternalLink') && ~isempty(obj.manifold_unit) + io.writeAttribute(fid, [fullpath '/manifold/unit'], obj.manifold_unit); + end + refs = obj.opticalchannel.export(fid, fullpath, refs); + if ~isempty(obj.origin_coords) + if startsWith(class(obj.origin_coords), 'types.untyped.') + refs = obj.origin_coords.export(fid, [fullpath '/origin_coords'], refs); + elseif ~isempty(obj.origin_coords) + io.writeDataset(fid, [fullpath '/origin_coords'], obj.origin_coords, 'forceArray'); + end + end + if ~isempty(obj.origin_coords) && ~isa(obj.origin_coords, 'types.untyped.SoftLink') && ~isa(obj.origin_coords, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/origin_coords/unit'], obj.origin_coords_unit); + end + if ~isempty(obj.reference_frame) + if startsWith(class(obj.reference_frame), 'types.untyped.') + refs = obj.reference_frame.export(fid, [fullpath '/reference_frame'], refs); + elseif ~isempty(obj.reference_frame) + io.writeDataset(fid, [fullpath 
'/reference_frame'], obj.reference_frame); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImagingRetinotopy.m b/+types/+core/ImagingRetinotopy.m new file mode 100644 index 00000000..a7acb096 --- /dev/null +++ b/+types/+core/ImagingRetinotopy.m @@ -0,0 +1,901 @@ +classdef ImagingRetinotopy < types.core.NWBDataInterface & types.untyped.GroupClass +% IMAGINGRETINOTOPY Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x). + + +% REQUIRED PROPERTIES +properties + axis_1_phase_map; % REQUIRED (single) Phase response to stimulus on the first measured axis. + axis_2_phase_map; % REQUIRED (single) Phase response to stimulus on the second measured axis. + axis_descriptions; % REQUIRED (char) Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta']. + vasculature_image; % REQUIRED (uint16) Gray-scale anatomical image of cortical surface. Array structure: [rows][columns] +end +% OPTIONAL PROPERTIES +properties + axis_1_phase_map_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + axis_1_phase_map_field_of_view; % (single) Size of viewing area, in meters. + axis_1_phase_map_unit; % (char) Unit that axis data is stored in (e.g., degrees). 
+ axis_1_power_map; % (single) Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + axis_1_power_map_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + axis_1_power_map_field_of_view; % (single) Size of viewing area, in meters. + axis_1_power_map_unit; % (char) Unit that axis data is stored in (e.g., degrees). + axis_2_phase_map_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + axis_2_phase_map_field_of_view; % (single) Size of viewing area, in meters. + axis_2_phase_map_unit; % (char) Unit that axis data is stored in (e.g., degrees). + axis_2_power_map; % (single) Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + axis_2_power_map_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + axis_2_power_map_field_of_view; % (single) Size of viewing area, in meters. + axis_2_power_map_unit; % (char) Unit that axis data is stored in (e.g., degrees). + focal_depth_image; % (uint16) Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns]. + focal_depth_image_bits_per_pixel; % (int32) Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. + focal_depth_image_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + focal_depth_image_field_of_view; % (single) Size of viewing area, in meters. + focal_depth_image_focal_depth; % (single) Focal depth offset, in meters. + focal_depth_image_format; % (char) Format of image. Right now only 'raw' is supported. 
+ sign_map; % (single) Sine of the angle between the direction of the gradient in axis_1 and axis_2. + sign_map_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + sign_map_field_of_view; % (single) Size of viewing area, in meters. + vasculature_image_bits_per_pixel; % (int32) Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value + vasculature_image_dimension; % (int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + vasculature_image_field_of_view; % (single) Size of viewing area, in meters. + vasculature_image_format; % (char) Format of image. Right now only 'raw' is supported. +end + +methods + function obj = ImagingRetinotopy(varargin) + % IMAGINGRETINOTOPY Constructor for ImagingRetinotopy + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'axis_1_phase_map',[]); + addParameter(p, 'axis_1_phase_map_dimension',[]); + addParameter(p, 'axis_1_phase_map_field_of_view',[]); + addParameter(p, 'axis_1_phase_map_unit',[]); + addParameter(p, 'axis_1_power_map',[]); + addParameter(p, 'axis_1_power_map_dimension',[]); + addParameter(p, 'axis_1_power_map_field_of_view',[]); + addParameter(p, 'axis_1_power_map_unit',[]); + addParameter(p, 'axis_2_phase_map',[]); + addParameter(p, 'axis_2_phase_map_dimension',[]); + addParameter(p, 'axis_2_phase_map_field_of_view',[]); + addParameter(p, 'axis_2_phase_map_unit',[]); + addParameter(p, 'axis_2_power_map',[]); + addParameter(p, 'axis_2_power_map_dimension',[]); + addParameter(p, 'axis_2_power_map_field_of_view',[]); + addParameter(p, 'axis_2_power_map_unit',[]); + addParameter(p, 'axis_descriptions',[]); + addParameter(p, 'focal_depth_image',[]); + addParameter(p, 'focal_depth_image_bits_per_pixel',[]); + addParameter(p, 
'focal_depth_image_dimension',[]); + addParameter(p, 'focal_depth_image_field_of_view',[]); + addParameter(p, 'focal_depth_image_focal_depth',[]); + addParameter(p, 'focal_depth_image_format',[]); + addParameter(p, 'sign_map',[]); + addParameter(p, 'sign_map_dimension',[]); + addParameter(p, 'sign_map_field_of_view',[]); + addParameter(p, 'vasculature_image',[]); + addParameter(p, 'vasculature_image_bits_per_pixel',[]); + addParameter(p, 'vasculature_image_dimension',[]); + addParameter(p, 'vasculature_image_field_of_view',[]); + addParameter(p, 'vasculature_image_format',[]); + misc.parseSkipInvalidName(p, varargin); + obj.axis_1_phase_map = p.Results.axis_1_phase_map; + obj.axis_1_phase_map_dimension = p.Results.axis_1_phase_map_dimension; + obj.axis_1_phase_map_field_of_view = p.Results.axis_1_phase_map_field_of_view; + obj.axis_1_phase_map_unit = p.Results.axis_1_phase_map_unit; + obj.axis_1_power_map = p.Results.axis_1_power_map; + obj.axis_1_power_map_dimension = p.Results.axis_1_power_map_dimension; + obj.axis_1_power_map_field_of_view = p.Results.axis_1_power_map_field_of_view; + obj.axis_1_power_map_unit = p.Results.axis_1_power_map_unit; + obj.axis_2_phase_map = p.Results.axis_2_phase_map; + obj.axis_2_phase_map_dimension = p.Results.axis_2_phase_map_dimension; + obj.axis_2_phase_map_field_of_view = p.Results.axis_2_phase_map_field_of_view; + obj.axis_2_phase_map_unit = p.Results.axis_2_phase_map_unit; + obj.axis_2_power_map = p.Results.axis_2_power_map; + obj.axis_2_power_map_dimension = p.Results.axis_2_power_map_dimension; + obj.axis_2_power_map_field_of_view = p.Results.axis_2_power_map_field_of_view; + obj.axis_2_power_map_unit = p.Results.axis_2_power_map_unit; + obj.axis_descriptions = p.Results.axis_descriptions; + obj.focal_depth_image = p.Results.focal_depth_image; + obj.focal_depth_image_bits_per_pixel = p.Results.focal_depth_image_bits_per_pixel; + obj.focal_depth_image_dimension = p.Results.focal_depth_image_dimension; + 
obj.focal_depth_image_field_of_view = p.Results.focal_depth_image_field_of_view; + obj.focal_depth_image_focal_depth = p.Results.focal_depth_image_focal_depth; + obj.focal_depth_image_format = p.Results.focal_depth_image_format; + obj.sign_map = p.Results.sign_map; + obj.sign_map_dimension = p.Results.sign_map_dimension; + obj.sign_map_field_of_view = p.Results.sign_map_field_of_view; + obj.vasculature_image = p.Results.vasculature_image; + obj.vasculature_image_bits_per_pixel = p.Results.vasculature_image_bits_per_pixel; + obj.vasculature_image_dimension = p.Results.vasculature_image_dimension; + obj.vasculature_image_field_of_view = p.Results.vasculature_image_field_of_view; + obj.vasculature_image_format = p.Results.vasculature_image_format; + if strcmp(class(obj), 'types.core.ImagingRetinotopy') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.axis_1_phase_map(obj, val) + obj.axis_1_phase_map = obj.validate_axis_1_phase_map(val); + end + function set.axis_1_phase_map_dimension(obj, val) + obj.axis_1_phase_map_dimension = obj.validate_axis_1_phase_map_dimension(val); + end + function set.axis_1_phase_map_field_of_view(obj, val) + obj.axis_1_phase_map_field_of_view = obj.validate_axis_1_phase_map_field_of_view(val); + end + function set.axis_1_phase_map_unit(obj, val) + obj.axis_1_phase_map_unit = obj.validate_axis_1_phase_map_unit(val); + end + function set.axis_1_power_map(obj, val) + obj.axis_1_power_map = obj.validate_axis_1_power_map(val); + end + function set.axis_1_power_map_dimension(obj, val) + obj.axis_1_power_map_dimension = obj.validate_axis_1_power_map_dimension(val); + end + function set.axis_1_power_map_field_of_view(obj, val) + obj.axis_1_power_map_field_of_view = obj.validate_axis_1_power_map_field_of_view(val); + end + function set.axis_1_power_map_unit(obj, val) + obj.axis_1_power_map_unit = 
obj.validate_axis_1_power_map_unit(val); + end + function set.axis_2_phase_map(obj, val) + obj.axis_2_phase_map = obj.validate_axis_2_phase_map(val); + end + function set.axis_2_phase_map_dimension(obj, val) + obj.axis_2_phase_map_dimension = obj.validate_axis_2_phase_map_dimension(val); + end + function set.axis_2_phase_map_field_of_view(obj, val) + obj.axis_2_phase_map_field_of_view = obj.validate_axis_2_phase_map_field_of_view(val); + end + function set.axis_2_phase_map_unit(obj, val) + obj.axis_2_phase_map_unit = obj.validate_axis_2_phase_map_unit(val); + end + function set.axis_2_power_map(obj, val) + obj.axis_2_power_map = obj.validate_axis_2_power_map(val); + end + function set.axis_2_power_map_dimension(obj, val) + obj.axis_2_power_map_dimension = obj.validate_axis_2_power_map_dimension(val); + end + function set.axis_2_power_map_field_of_view(obj, val) + obj.axis_2_power_map_field_of_view = obj.validate_axis_2_power_map_field_of_view(val); + end + function set.axis_2_power_map_unit(obj, val) + obj.axis_2_power_map_unit = obj.validate_axis_2_power_map_unit(val); + end + function set.axis_descriptions(obj, val) + obj.axis_descriptions = obj.validate_axis_descriptions(val); + end + function set.focal_depth_image(obj, val) + obj.focal_depth_image = obj.validate_focal_depth_image(val); + end + function set.focal_depth_image_bits_per_pixel(obj, val) + obj.focal_depth_image_bits_per_pixel = obj.validate_focal_depth_image_bits_per_pixel(val); + end + function set.focal_depth_image_dimension(obj, val) + obj.focal_depth_image_dimension = obj.validate_focal_depth_image_dimension(val); + end + function set.focal_depth_image_field_of_view(obj, val) + obj.focal_depth_image_field_of_view = obj.validate_focal_depth_image_field_of_view(val); + end + function set.focal_depth_image_focal_depth(obj, val) + obj.focal_depth_image_focal_depth = obj.validate_focal_depth_image_focal_depth(val); + end + function set.focal_depth_image_format(obj, val) + obj.focal_depth_image_format 
= obj.validate_focal_depth_image_format(val); + end + function set.sign_map(obj, val) + obj.sign_map = obj.validate_sign_map(val); + end + function set.sign_map_dimension(obj, val) + obj.sign_map_dimension = obj.validate_sign_map_dimension(val); + end + function set.sign_map_field_of_view(obj, val) + obj.sign_map_field_of_view = obj.validate_sign_map_field_of_view(val); + end + function set.vasculature_image(obj, val) + obj.vasculature_image = obj.validate_vasculature_image(val); + end + function set.vasculature_image_bits_per_pixel(obj, val) + obj.vasculature_image_bits_per_pixel = obj.validate_vasculature_image_bits_per_pixel(val); + end + function set.vasculature_image_dimension(obj, val) + obj.vasculature_image_dimension = obj.validate_vasculature_image_dimension(val); + end + function set.vasculature_image_field_of_view(obj, val) + obj.vasculature_image_field_of_view = obj.validate_vasculature_image_field_of_view(val); + end + function set.vasculature_image_format(obj, val) + obj.vasculature_image_format = obj.validate_vasculature_image_format(val); + end + %% VALIDATORS + + function val = validate_axis_1_phase_map(obj, val) + val = types.util.checkDtype('axis_1_phase_map', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_phase_map_dimension(obj, val) + val = types.util.checkDtype('axis_1_phase_map_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + 
end + function val = validate_axis_1_phase_map_field_of_view(obj, val) + val = types.util.checkDtype('axis_1_phase_map_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_phase_map_unit(obj, val) + val = types.util.checkDtype('axis_1_phase_map_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_power_map(obj, val) + val = types.util.checkDtype('axis_1_power_map', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_power_map_dimension(obj, val) + val = types.util.checkDtype('axis_1_power_map_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_power_map_field_of_view(obj, val) + val = types.util.checkDtype('axis_1_power_map_field_of_view', 'single', val); + if isa(val, 
'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_1_power_map_unit(obj, val) + val = types.util.checkDtype('axis_1_power_map_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_phase_map(obj, val) + val = types.util.checkDtype('axis_2_phase_map', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_phase_map_dimension(obj, val) + val = types.util.checkDtype('axis_2_phase_map_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_phase_map_field_of_view(obj, val) + val = types.util.checkDtype('axis_2_phase_map_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + 
valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_phase_map_unit(obj, val) + val = types.util.checkDtype('axis_2_phase_map_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_power_map(obj, val) + val = types.util.checkDtype('axis_2_power_map', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_power_map_dimension(obj, val) + val = types.util.checkDtype('axis_2_power_map_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_2_power_map_field_of_view(obj, val) + val = types.util.checkDtype('axis_2_power_map_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = 
validate_axis_2_power_map_unit(obj, val) + val = types.util.checkDtype('axis_2_power_map_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_axis_descriptions(obj, val) + val = types.util.checkDtype('axis_descriptions', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image(obj, val) + val = types.util.checkDtype('focal_depth_image', 'uint16', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image_bits_per_pixel(obj, val) + val = types.util.checkDtype('focal_depth_image_bits_per_pixel', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image_dimension(obj, val) + val = types.util.checkDtype('focal_depth_image_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + 
valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image_field_of_view(obj, val) + val = types.util.checkDtype('focal_depth_image_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image_focal_depth(obj, val) + val = types.util.checkDtype('focal_depth_image_focal_depth', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_focal_depth_image_format(obj, val) + val = types.util.checkDtype('focal_depth_image_format', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_sign_map(obj, val) + val = types.util.checkDtype('sign_map', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); 
+ end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_sign_map_dimension(obj, val) + val = types.util.checkDtype('sign_map_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_sign_map_field_of_view(obj, val) + val = types.util.checkDtype('sign_map_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_vasculature_image(obj, val) + val = types.util.checkDtype('vasculature_image', 'uint16', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_vasculature_image_bits_per_pixel(obj, val) + val = types.util.checkDtype('vasculature_image_bits_per_pixel', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_vasculature_image_dimension(obj, val) + val = 
types.util.checkDtype('vasculature_image_dimension', 'int32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_vasculature_image_field_of_view(obj, val) + val = types.util.checkDtype('vasculature_image_field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_vasculature_image_format(obj, val) + val = types.util.checkDtype('vasculature_image_format', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.axis_1_phase_map), 'types.untyped.') + refs = obj.axis_1_phase_map.export(fid, [fullpath '/axis_1_phase_map'], refs); + elseif ~isempty(obj.axis_1_phase_map) + io.writeDataset(fid, [fullpath '/axis_1_phase_map'], obj.axis_1_phase_map, 'forceArray'); + end + if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_phase_map/dimension'], 
obj.axis_1_phase_map_dimension, 'forceArray'); + end + if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_phase_map/field_of_view'], obj.axis_1_phase_map_field_of_view, 'forceArray'); + end + if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_phase_map/unit'], obj.axis_1_phase_map_unit); + end + if ~isempty(obj.axis_1_power_map) + if startsWith(class(obj.axis_1_power_map), 'types.untyped.') + refs = obj.axis_1_power_map.export(fid, [fullpath '/axis_1_power_map'], refs); + elseif ~isempty(obj.axis_1_power_map) + io.writeDataset(fid, [fullpath '/axis_1_power_map'], obj.axis_1_power_map, 'forceArray'); + end + end + if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_power_map/dimension'], obj.axis_1_power_map_dimension, 'forceArray'); + end + if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_power_map/field_of_view'], obj.axis_1_power_map_field_of_view, 'forceArray'); + end + if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_1_power_map/unit'], obj.axis_1_power_map_unit); + end + if startsWith(class(obj.axis_2_phase_map), 'types.untyped.') + refs = obj.axis_2_phase_map.export(fid, [fullpath '/axis_2_phase_map'], refs); + elseif ~isempty(obj.axis_2_phase_map) + io.writeDataset(fid, [fullpath '/axis_2_phase_map'], obj.axis_2_phase_map, 'forceArray'); + end + if 
~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_phase_map/dimension'], obj.axis_2_phase_map_dimension, 'forceArray'); + end + if ~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_phase_map/field_of_view'], obj.axis_2_phase_map_field_of_view, 'forceArray'); + end + if ~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_phase_map/unit'], obj.axis_2_phase_map_unit); + end + if ~isempty(obj.axis_2_power_map) + if startsWith(class(obj.axis_2_power_map), 'types.untyped.') + refs = obj.axis_2_power_map.export(fid, [fullpath '/axis_2_power_map'], refs); + elseif ~isempty(obj.axis_2_power_map) + io.writeDataset(fid, [fullpath '/axis_2_power_map'], obj.axis_2_power_map, 'forceArray'); + end + end + if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_power_map/dimension'], obj.axis_2_power_map_dimension, 'forceArray'); + end + if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_power_map/field_of_view'], obj.axis_2_power_map_field_of_view, 'forceArray'); + end + if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/axis_2_power_map/unit'], obj.axis_2_power_map_unit); + end + if startsWith(class(obj.axis_descriptions), 'types.untyped.') + refs = 
obj.axis_descriptions.export(fid, [fullpath '/axis_descriptions'], refs); + elseif ~isempty(obj.axis_descriptions) + io.writeDataset(fid, [fullpath '/axis_descriptions'], obj.axis_descriptions, 'forceArray'); + end + if ~isempty(obj.focal_depth_image) + if startsWith(class(obj.focal_depth_image), 'types.untyped.') + refs = obj.focal_depth_image.export(fid, [fullpath '/focal_depth_image'], refs); + elseif ~isempty(obj.focal_depth_image) + io.writeDataset(fid, [fullpath '/focal_depth_image'], obj.focal_depth_image, 'forceArray'); + end + end + if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/focal_depth_image/bits_per_pixel'], obj.focal_depth_image_bits_per_pixel); + end + if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/focal_depth_image/dimension'], obj.focal_depth_image_dimension, 'forceArray'); + end + if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/focal_depth_image/field_of_view'], obj.focal_depth_image_field_of_view, 'forceArray'); + end + if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/focal_depth_image/focal_depth'], obj.focal_depth_image_focal_depth); + end + if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/focal_depth_image/format'], obj.focal_depth_image_format); + end + if ~isempty(obj.sign_map) + if startsWith(class(obj.sign_map), 'types.untyped.') + refs = 
obj.sign_map.export(fid, [fullpath '/sign_map'], refs); + elseif ~isempty(obj.sign_map) + io.writeDataset(fid, [fullpath '/sign_map'], obj.sign_map, 'forceArray'); + end + end + if ~isempty(obj.sign_map) && ~isa(obj.sign_map, 'types.untyped.SoftLink') && ~isa(obj.sign_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/sign_map/dimension'], obj.sign_map_dimension, 'forceArray'); + end + if ~isempty(obj.sign_map) && ~isa(obj.sign_map, 'types.untyped.SoftLink') && ~isa(obj.sign_map, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/sign_map/field_of_view'], obj.sign_map_field_of_view, 'forceArray'); + end + if startsWith(class(obj.vasculature_image), 'types.untyped.') + refs = obj.vasculature_image.export(fid, [fullpath '/vasculature_image'], refs); + elseif ~isempty(obj.vasculature_image) + io.writeDataset(fid, [fullpath '/vasculature_image'], obj.vasculature_image, 'forceArray'); + end + if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/vasculature_image/bits_per_pixel'], obj.vasculature_image_bits_per_pixel); + end + if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/vasculature_image/dimension'], obj.vasculature_image_dimension, 'forceArray'); + end + if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/vasculature_image/field_of_view'], obj.vasculature_image_field_of_view, 'forceArray'); + end + if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/vasculature_image/format'], 
obj.vasculature_image_format); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IndexSeries.m b/+types/+core/IndexSeries.m new file mode 100644 index 00000000..10f394d5 --- /dev/null +++ b/+types/+core/IndexSeries.m @@ -0,0 +1,144 @@ +classdef IndexSeries < types.core.TimeSeries & types.untyped.GroupClass +% INDEXSERIES Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. + + +% OPTIONAL PROPERTIES +properties + indexed_images; % Images + indexed_timeseries; % ImageSeries +end + +methods + function obj = IndexSeries(varargin) + % INDEXSERIES Constructor for IndexSeries + varargin = [{'data_unit' 'N/A'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_conversion',[]); + addParameter(p, 'data_offset',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'indexed_images',[]); + addParameter(p, 'indexed_timeseries',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_conversion = p.Results.data_conversion; + obj.data_offset = p.Results.data_offset; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + obj.indexed_images = p.Results.indexed_images; + obj.indexed_timeseries = p.Results.indexed_timeseries; + if strcmp(class(obj), 'types.core.IndexSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + 
types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.indexed_images(obj, val) + obj.indexed_images = obj.validate_indexed_images(val); + end + function set.indexed_timeseries(obj, val) + obj.indexed_timeseries = obj.validate_indexed_timeseries(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'uint32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_conversion(obj, val) + val = types.util.checkDtype('data_conversion', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_offset(obj, val) + val = types.util.checkDtype('data_offset', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_resolution(obj, val) + val = types.util.checkDtype('data_resolution', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + 
types.util.checkDims(valsz, validshapes); + end + function val = validate_indexed_images(obj, val) + val = types.util.checkDtype('indexed_images', 'types.core.Images', val); + end + function val = validate_indexed_timeseries(obj, val) + val = types.util.checkDtype('indexed_timeseries', 'types.core.ImageSeries', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.indexed_images) + refs = obj.indexed_images.export(fid, [fullpath '/indexed_images'], refs); + end + if ~isempty(obj.indexed_timeseries) + refs = obj.indexed_timeseries.export(fid, [fullpath '/indexed_timeseries'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntervalSeries.m b/+types/+core/IntervalSeries.m new file mode 100644 index 00000000..67445fdf --- /dev/null +++ b/+types/+core/IntervalSeries.m @@ -0,0 +1,60 @@ +classdef IntervalSeries < types.core.TimeSeries & types.untyped.GroupClass +% INTERVALSERIES Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. 
+ + + +methods + function obj = IntervalSeries(varargin) + % INTERVALSERIES Constructor for IntervalSeries + varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.IntervalSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'int8', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularElectrode.m b/+types/+core/IntracellularElectrode.m new file mode 100644 index 00000000..6b1231e0 --- /dev/null +++ b/+types/+core/IntracellularElectrode.m @@ -0,0 +1,296 @@ +classdef IntracellularElectrode < types.core.NWBContainer & types.untyped.GroupClass +% INTRACELLULARELECTRODE An intracellular electrode and its metadata. + + +% REQUIRED PROPERTIES +properties + description; % REQUIRED (char) Description of electrode (e.g., whole-cell, sharp, etc.). 
+end +% OPTIONAL PROPERTIES +properties + cell_id; % (char) unique ID of the cell + device; % Device + filtering; % (char) Electrode specific filtering. + initial_access_resistance; % (char) Initial access resistance. + location; % (char) Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + resistance; % (char) Electrode resistance, in ohms. + seal; % (char) Information about seal used for recording. + slice; % (char) Information about slice used for recording. +end + +methods + function obj = IntracellularElectrode(varargin) + % INTRACELLULARELECTRODE Constructor for IntracellularElectrode + obj = obj@types.core.NWBContainer(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'cell_id',[]); + addParameter(p, 'description',[]); + addParameter(p, 'device',[]); + addParameter(p, 'filtering',[]); + addParameter(p, 'initial_access_resistance',[]); + addParameter(p, 'location',[]); + addParameter(p, 'resistance',[]); + addParameter(p, 'seal',[]); + addParameter(p, 'slice',[]); + misc.parseSkipInvalidName(p, varargin); + obj.cell_id = p.Results.cell_id; + obj.description = p.Results.description; + obj.device = p.Results.device; + obj.filtering = p.Results.filtering; + obj.initial_access_resistance = p.Results.initial_access_resistance; + obj.location = p.Results.location; + obj.resistance = p.Results.resistance; + obj.seal = p.Results.seal; + obj.slice = p.Results.slice; + if strcmp(class(obj), 'types.core.IntracellularElectrode') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.cell_id(obj, val) + obj.cell_id = obj.validate_cell_id(val); + end + function set.description(obj, val) + obj.description = 
obj.validate_description(val); + end + function set.device(obj, val) + obj.device = obj.validate_device(val); + end + function set.filtering(obj, val) + obj.filtering = obj.validate_filtering(val); + end + function set.initial_access_resistance(obj, val) + obj.initial_access_resistance = obj.validate_initial_access_resistance(val); + end + function set.location(obj, val) + obj.location = obj.validate_location(val); + end + function set.resistance(obj, val) + obj.resistance = obj.validate_resistance(val); + end + function set.seal(obj, val) + obj.seal = obj.validate_seal(val); + end + function set.slice(obj, val) + obj.slice = obj.validate_slice(val); + end + %% VALIDATORS + + function val = validate_cell_id(obj, val) + val = types.util.checkDtype('cell_id', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_device(obj, val) + val = types.util.checkDtype('device', 'types.core.Device', val); + end + function val = validate_filtering(obj, val) + val = types.util.checkDtype('filtering', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + 
end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_initial_access_resistance(obj, val) + val = types.util.checkDtype('initial_access_resistance', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_location(obj, val) + val = types.util.checkDtype('location', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_resistance(obj, val) + val = types.util.checkDtype('resistance', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_seal(obj, val) + val = types.util.checkDtype('seal', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_slice(obj, val) + val = types.util.checkDtype('slice', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = 
val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.cell_id) + if startsWith(class(obj.cell_id), 'types.untyped.') + refs = obj.cell_id.export(fid, [fullpath '/cell_id'], refs); + elseif ~isempty(obj.cell_id) + io.writeDataset(fid, [fullpath '/cell_id'], obj.cell_id); + end + end + if startsWith(class(obj.description), 'types.untyped.') + refs = obj.description.export(fid, [fullpath '/description'], refs); + elseif ~isempty(obj.description) + io.writeDataset(fid, [fullpath '/description'], obj.description); + end + refs = obj.device.export(fid, [fullpath '/device'], refs); + if ~isempty(obj.filtering) + if startsWith(class(obj.filtering), 'types.untyped.') + refs = obj.filtering.export(fid, [fullpath '/filtering'], refs); + elseif ~isempty(obj.filtering) + io.writeDataset(fid, [fullpath '/filtering'], obj.filtering); + end + end + if ~isempty(obj.initial_access_resistance) + if startsWith(class(obj.initial_access_resistance), 'types.untyped.') + refs = obj.initial_access_resistance.export(fid, [fullpath '/initial_access_resistance'], refs); + elseif ~isempty(obj.initial_access_resistance) + io.writeDataset(fid, [fullpath '/initial_access_resistance'], obj.initial_access_resistance); + end + end + if ~isempty(obj.location) + if startsWith(class(obj.location), 'types.untyped.') + refs = obj.location.export(fid, [fullpath '/location'], refs); + elseif ~isempty(obj.location) + io.writeDataset(fid, [fullpath '/location'], obj.location); + end + end + if ~isempty(obj.resistance) + if startsWith(class(obj.resistance), 'types.untyped.') + refs = obj.resistance.export(fid, [fullpath '/resistance'], refs); + elseif 
~isempty(obj.resistance) + io.writeDataset(fid, [fullpath '/resistance'], obj.resistance); + end + end + if ~isempty(obj.seal) + if startsWith(class(obj.seal), 'types.untyped.') + refs = obj.seal.export(fid, [fullpath '/seal'], refs); + elseif ~isempty(obj.seal) + io.writeDataset(fid, [fullpath '/seal'], obj.seal); + end + end + if ~isempty(obj.slice) + if startsWith(class(obj.slice), 'types.untyped.') + refs = obj.slice.export(fid, [fullpath '/slice'], refs); + elseif ~isempty(obj.slice) + io.writeDataset(fid, [fullpath '/slice'], obj.slice); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularElectrodesTable.m b/+types/+core/IntracellularElectrodesTable.m new file mode 100644 index 00000000..8acf4c58 --- /dev/null +++ b/+types/+core/IntracellularElectrodesTable.m @@ -0,0 +1,53 @@ +classdef IntracellularElectrodesTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% INTRACELLULARELECTRODESTABLE Table for storing intracellular electrode related metadata. + + +% REQUIRED PROPERTIES +properties + electrode; % REQUIRED (VectorData) Column for storing the reference to the intracellular electrode. 
+end + +methods + function obj = IntracellularElectrodesTable(varargin) + % INTRACELLULARELECTRODESTABLE Constructor for IntracellularElectrodesTable + varargin = [{'description' 'Table for storing intracellular electrode related metadata.'} varargin]; + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'electrode',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.electrode = p.Results.electrode; + if strcmp(class(obj), 'types.core.IntracellularElectrodesTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.IntracellularElectrodesTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.electrode(obj, val) + obj.electrode = obj.validate_electrode(val); + end + %% VALIDATORS + + function val = validate_electrode(obj, val) + val = types.util.checkDtype('electrode', 'types.hdmf_common.VectorData', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.electrode.export(fid, [fullpath '/electrode'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularRecordingsTable.m b/+types/+core/IntracellularRecordingsTable.m new file mode 100644 index 00000000..15ef1f57 --- /dev/null +++ b/+types/+core/IntracellularRecordingsTable.m @@ -0,0 +1,73 @@ +classdef IntracellularRecordingsTable < types.hdmf_common.AlignedDynamicTable & types.untyped.GroupClass +% INTRACELLULARRECORDINGSTABLE A table to group together a stimulus and response from a single electrode and a single simultaneous recording. 
Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + + +% REQUIRED PROPERTIES +properties + electrodes; % REQUIRED (IntracellularElectrodesTable) Table for storing intracellular electrode related metadata. + responses; % REQUIRED (IntracellularResponsesTable) Table for storing intracellular response related metadata. + stimuli; % REQUIRED (IntracellularStimuliTable) Table for storing intracellular stimulus related metadata. 
+end + +methods + function obj = IntracellularRecordingsTable(varargin) + % INTRACELLULARRECORDINGSTABLE Constructor for IntracellularRecordingsTable + varargin = [{'description' 'A table to group together a stimulus and response from a single electrode and a single simultaneous recording and for storing metadata about the intracellular recording.'} varargin]; + obj = obj@types.hdmf_common.AlignedDynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'electrodes',[]); + addParameter(p, 'responses',[]); + addParameter(p, 'stimuli',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.electrodes = p.Results.electrodes; + obj.responses = p.Results.responses; + obj.stimuli = p.Results.stimuli; + if strcmp(class(obj), 'types.core.IntracellularRecordingsTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.IntracellularRecordingsTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.electrodes(obj, val) + obj.electrodes = obj.validate_electrodes(val); + end + function set.responses(obj, val) + obj.responses = obj.validate_responses(val); + end + function set.stimuli(obj, val) + obj.stimuli = obj.validate_stimuli(val); + end + %% VALIDATORS + + function val = validate_electrodes(obj, val) + val = types.util.checkDtype('electrodes', 'types.core.IntracellularElectrodesTable', val); + end + function val = validate_responses(obj, val) + val = types.util.checkDtype('responses', 'types.core.IntracellularResponsesTable', val); + end + function val = validate_stimuli(obj, val) + val = types.util.checkDtype('stimuli', 'types.core.IntracellularStimuliTable', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = 
export@types.hdmf_common.AlignedDynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.electrodes.export(fid, [fullpath '/electrodes'], refs); + refs = obj.responses.export(fid, [fullpath '/responses'], refs); + refs = obj.stimuli.export(fid, [fullpath '/stimuli'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularResponsesTable.m b/+types/+core/IntracellularResponsesTable.m new file mode 100644 index 00000000..14ad66dd --- /dev/null +++ b/+types/+core/IntracellularResponsesTable.m @@ -0,0 +1,53 @@ +classdef IntracellularResponsesTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% INTRACELLULARRESPONSESTABLE Table for storing intracellular response related metadata. + + +% REQUIRED PROPERTIES +properties + response; % REQUIRED (TimeSeriesReferenceVectorData) Column storing the reference to the recorded response for the recording (rows) +end + +methods + function obj = IntracellularResponsesTable(varargin) + % INTRACELLULARRESPONSESTABLE Constructor for IntracellularResponsesTable + varargin = [{'description' 'Table for storing intracellular response related metadata.'} varargin]; + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'response',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.response = p.Results.response; + if strcmp(class(obj), 'types.core.IntracellularResponsesTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.IntracellularResponsesTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.response(obj, val) + obj.response = obj.validate_response(val); + end + %% VALIDATORS 
+ + function val = validate_response(obj, val) + val = types.util.checkDtype('response', 'types.core.TimeSeriesReferenceVectorData', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.response.export(fid, [fullpath '/response'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularStimuliTable.m b/+types/+core/IntracellularStimuliTable.m new file mode 100644 index 00000000..ff87e707 --- /dev/null +++ b/+types/+core/IntracellularStimuliTable.m @@ -0,0 +1,53 @@ +classdef IntracellularStimuliTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% INTRACELLULARSTIMULITABLE Table for storing intracellular stimulus related metadata. + + +% REQUIRED PROPERTIES +properties + stimulus; % REQUIRED (TimeSeriesReferenceVectorData) Column storing the reference to the recorded stimulus for the recording (rows). 
+end + +methods + function obj = IntracellularStimuliTable(varargin) + % INTRACELLULARSTIMULITABLE Constructor for IntracellularStimuliTable + varargin = [{'description' 'Table for storing intracellular stimulus related metadata.'} varargin]; + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'stimulus',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.stimulus = p.Results.stimulus; + if strcmp(class(obj), 'types.core.IntracellularStimuliTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.IntracellularStimuliTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.stimulus(obj, val) + obj.stimulus = obj.validate_stimulus(val); + end + %% VALIDATORS + + function val = validate_stimulus(obj, val) + val = types.util.checkDtype('stimulus', 'types.core.TimeSeriesReferenceVectorData', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.stimulus.export(fid, [fullpath '/stimulus'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/LFP.m b/+types/+core/LFP.m new file mode 100644 index 00000000..56eaf392 --- /dev/null +++ b/+types/+core/LFP.m @@ -0,0 +1,48 @@ +classdef LFP < types.core.NWBDataInterface & types.untyped.GroupClass +% LFP LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. 
+ + +% REQUIRED PROPERTIES +properties + electricalseries; % REQUIRED (ElectricalSeries) ElectricalSeries object(s) containing LFP data for one or more channels. +end + +methods + function obj = LFP(varargin) + % LFP Constructor for LFP + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.electricalseries, ivarargin] = types.util.parseConstrained(obj,'electricalseries', 'types.core.ElectricalSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.LFP') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.electricalseries(obj, val) + obj.electricalseries = obj.validate_electricalseries(val); + end + %% VALIDATORS + + function val = validate_electricalseries(obj, val) + namedprops = struct(); + constrained = {'types.core.ElectricalSeries'}; + types.util.checkSet('electricalseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.electricalseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/LabMetaData.m b/+types/+core/LabMetaData.m new file mode 100644 index 00000000..80933fc3 --- /dev/null +++ b/+types/+core/LabMetaData.m @@ -0,0 +1,28 @@ +classdef LabMetaData < types.core.NWBContainer & types.untyped.GroupClass +% LABMETADATA Lab-specific meta-data. 
+ + + +methods + function obj = LabMetaData(varargin) + % LABMETADATA Constructor for LabMetaData + obj = obj@types.core.NWBContainer(varargin{:}); + if strcmp(class(obj), 'types.core.LabMetaData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/MotionCorrection.m b/+types/+core/MotionCorrection.m new file mode 100644 index 00000000..c6cc113a --- /dev/null +++ b/+types/+core/MotionCorrection.m @@ -0,0 +1,48 @@ +classdef MotionCorrection < types.core.NWBDataInterface & types.untyped.GroupClass +% MOTIONCORRECTION An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions). + + +% REQUIRED PROPERTIES +properties + correctedimagestack; % REQUIRED (CorrectedImageStack) Reuslts from motion correction of an image stack. 
+end + +methods + function obj = MotionCorrection(varargin) + % MOTIONCORRECTION Constructor for MotionCorrection + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.correctedimagestack, ivarargin] = types.util.parseConstrained(obj,'correctedimagestack', 'types.core.CorrectedImageStack', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.MotionCorrection') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.correctedimagestack(obj, val) + obj.correctedimagestack = obj.validate_correctedimagestack(val); + end + %% VALIDATORS + + function val = validate_correctedimagestack(obj, val) + namedprops = struct(); + constrained = {'types.core.CorrectedImageStack'}; + types.util.checkSet('correctedimagestack', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.correctedimagestack.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/NWBContainer.m b/+types/+core/NWBContainer.m new file mode 100644 index 00000000..7d53229c --- /dev/null +++ b/+types/+core/NWBContainer.m @@ -0,0 +1,28 @@ +classdef NWBContainer < types.hdmf_common.Container & types.untyped.GroupClass +% NWBCONTAINER An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. 
+ + + +methods + function obj = NWBContainer(varargin) + % NWBCONTAINER Constructor for NWBContainer + obj = obj@types.hdmf_common.Container(varargin{:}); + if strcmp(class(obj), 'types.core.NWBContainer') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Container(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/NWBData.m b/+types/+core/NWBData.m new file mode 100644 index 00000000..8a1ef897 --- /dev/null +++ b/+types/+core/NWBData.m @@ -0,0 +1,39 @@ +classdef NWBData < types.hdmf_common.Data & types.untyped.DatasetClass +% NWBDATA An abstract data type for a dataset. + + + +methods + function obj = NWBData(varargin) + % NWBDATA Constructor for NWBData + obj = obj@types.hdmf_common.Data(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.NWBData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Data(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/NWBDataInterface.m b/+types/+core/NWBDataInterface.m new file mode 100644 index 00000000..182ffe34 --- /dev/null +++ b/+types/+core/NWBDataInterface.m @@ -0,0 +1,28 @@ +classdef NWBDataInterface < types.core.NWBContainer & 
types.untyped.GroupClass +% NWBDATAINTERFACE An abstract data type for a generic container storing collections of data, as opposed to metadata. + + + +methods + function obj = NWBDataInterface(varargin) + % NWBDATAINTERFACE Constructor for NWBDataInterface + obj = obj@types.core.NWBContainer(varargin{:}); + if strcmp(class(obj), 'types.core.NWBDataInterface') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/NWBFile.m b/+types/+core/NWBFile.m new file mode 100644 index 00000000..f3b40a1b --- /dev/null +++ b/+types/+core/NWBFile.m @@ -0,0 +1,1093 @@ +classdef NWBFile < types.core.NWBContainer & types.untyped.GroupClass +% NWBFILE An NWB file storing cellular-based neurophysiology data from a single experimental session. + + +% READONLY PROPERTIES +properties(SetAccess = protected) + nwb_version; % (char) File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers. +end +% REQUIRED PROPERTIES +properties + file_create_date; % REQUIRED (datetime) A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array. + identifier; % REQUIRED (char) A unique text identifier for the file. 
For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files. + session_description; % REQUIRED (char) A description of the experimental session and data in the file. + session_start_time; % REQUIRED (datetime) Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. + timestamps_reference_time; % REQUIRED (datetime) Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero). +end +% OPTIONAL PROPERTIES +properties + acquisition; % (DynamicTable|NWBDataInterface) Tabular data that is relevent to acquisition | Acquired, raw data. + analysis; % (DynamicTable|NWBContainer) Tabular data that is relevent to data stored in analysis | Custom analysis results. + general; % (LabMetaData) Place-holder than can be extended so that lab-specific meta-data can be placed in /general. + general_data_collection; % (char) Notes about data collection and analysis. + general_devices; % (Device) Data acquisition devices. + general_experiment_description; % (char) General description of the experiment. + general_experimenter; % (char) Name of person(s) who performed the experiment. Can also specify roles of different people involved. + general_extracellular_ephys; % (ElectrodeGroup) Physical group of electrodes. + general_extracellular_ephys_electrodes; % (DynamicTable) A table of all electrodes (i.e. channels) used for recording. 
+ general_institution; % (char) Institution(s) where experiment was performed. + general_intracellular_ephys; % (IntracellularElectrode) An intracellular electrode. + general_intracellular_ephys_experimental_conditions; % (ExperimentalConditionsTable) A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. + general_intracellular_ephys_filtering; % (char) [DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries. + general_intracellular_ephys_intracellular_recordings; % (IntracellularRecordingsTable) A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + general_intracellular_ephys_repetitions; % (RepetitionsTable) A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. 
+ general_intracellular_ephys_sequential_recordings; % (SequentialRecordingsTable) A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + general_intracellular_ephys_simultaneous_recordings; % (SimultaneousRecordingsTable) A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. + general_intracellular_ephys_sweep_table; % (SweepTable) [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata. + general_keywords; % (char) Terms to search over. + general_lab; % (char) Laboratory where experiment was performed. + general_notes; % (char) Notes about the experiment. + general_optogenetics; % (OptogeneticStimulusSite) An optogenetic stimulation site. + general_optophysiology; % (ImagingPlane) An imaging plane. + general_pharmacology; % (char) Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + general_protocol; % (char) Experimental protocol, if applicable. e.g., include IACUC protocol number. + general_related_publications; % (char) Publication information. PMID, DOI, URL, etc. + general_session_id; % (char) Lab-specific ID for the session. + general_slices; % (char) Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. + general_source_script; % (char) Script file or link to public source code used to create this NWB file. 
+ general_source_script_file_name; % (char) Name of script file. + general_stimulus; % (char) Notes about stimuli, such as how and where they were presented. + general_subject; % (Subject) Information about the animal or person from which the data was measured. + general_surgery; % (char) Narrative description about surgery/surgeries, including date(s) and who performed surgery. + general_virus; % (char) Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. + intervals; % (TimeIntervals) Optional additional table(s) for describing other experimental time intervals. + intervals_epochs; % (TimeIntervals) Divisions in time marking experimental stages or sub-divisions of a single recording session. + intervals_invalid_times; % (TimeIntervals) Time intervals that should be removed from analysis. + intervals_trials; % (TimeIntervals) Repeated experimental events that have a logical grouping. + processing; % (ProcessingModule) Intermediate analysis of acquired data. + scratch; % (DynamicTable|NWBContainer|ScratchData) Any one-off tables | Any one-off containers | Any one-off datasets + stimulus_presentation; % (TimeSeries) TimeSeries objects containing data of presented stimuli. + stimulus_templates; % (Images|TimeSeries) Images objects containing images of presented stimuli. | TimeSeries objects containing template data of presented stimuli. + units; % (Units) Data about sorted spike units. 
+end + +methods + function obj = NWBFile(varargin) + % NWBFILE Constructor for NWBFile + varargin = [{'nwb_version' '2.6.0'} varargin]; + obj = obj@types.core.NWBContainer(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'acquisition',types.untyped.Set()); + addParameter(p, 'analysis',types.untyped.Set()); + addParameter(p, 'file_create_date',[]); + addParameter(p, 'general',types.untyped.Set()); + addParameter(p, 'general_data_collection',[]); + addParameter(p, 'general_devices',types.untyped.Set()); + addParameter(p, 'general_experiment_description',[]); + addParameter(p, 'general_experimenter',[]); + addParameter(p, 'general_extracellular_ephys',types.untyped.Set()); + addParameter(p, 'general_extracellular_ephys_electrodes',[]); + addParameter(p, 'general_institution',[]); + addParameter(p, 'general_intracellular_ephys',types.untyped.Set()); + addParameter(p, 'general_intracellular_ephys_experimental_conditions',[]); + addParameter(p, 'general_intracellular_ephys_filtering',[]); + addParameter(p, 'general_intracellular_ephys_intracellular_recordings',[]); + addParameter(p, 'general_intracellular_ephys_repetitions',[]); + addParameter(p, 'general_intracellular_ephys_sequential_recordings',[]); + addParameter(p, 'general_intracellular_ephys_simultaneous_recordings',[]); + addParameter(p, 'general_intracellular_ephys_sweep_table',[]); + addParameter(p, 'general_keywords',[]); + addParameter(p, 'general_lab',[]); + addParameter(p, 'general_notes',[]); + addParameter(p, 'general_optogenetics',types.untyped.Set()); + addParameter(p, 'general_optophysiology',types.untyped.Set()); + addParameter(p, 'general_pharmacology',[]); + addParameter(p, 'general_protocol',[]); + addParameter(p, 'general_related_publications',[]); + addParameter(p, 'general_session_id',[]); + addParameter(p, 'general_slices',[]); + addParameter(p, 'general_source_script',[]); + addParameter(p, 
'general_source_script_file_name',[]); + addParameter(p, 'general_stimulus',[]); + addParameter(p, 'general_subject',[]); + addParameter(p, 'general_surgery',[]); + addParameter(p, 'general_virus',[]); + addParameter(p, 'identifier',[]); + addParameter(p, 'intervals',types.untyped.Set()); + addParameter(p, 'intervals_epochs',[]); + addParameter(p, 'intervals_invalid_times',[]); + addParameter(p, 'intervals_trials',[]); + addParameter(p, 'nwb_version',[]); + addParameter(p, 'processing',types.untyped.Set()); + addParameter(p, 'scratch',types.untyped.Set()); + addParameter(p, 'session_description',[]); + addParameter(p, 'session_start_time',[]); + addParameter(p, 'stimulus_presentation',types.untyped.Set()); + addParameter(p, 'stimulus_templates',types.untyped.Set()); + addParameter(p, 'timestamps_reference_time',[]); + addParameter(p, 'units',[]); + misc.parseSkipInvalidName(p, varargin); + obj.acquisition = p.Results.acquisition; + obj.analysis = p.Results.analysis; + obj.file_create_date = p.Results.file_create_date; + obj.general = p.Results.general; + obj.general_data_collection = p.Results.general_data_collection; + obj.general_devices = p.Results.general_devices; + obj.general_experiment_description = p.Results.general_experiment_description; + obj.general_experimenter = p.Results.general_experimenter; + obj.general_extracellular_ephys = p.Results.general_extracellular_ephys; + obj.general_extracellular_ephys_electrodes = p.Results.general_extracellular_ephys_electrodes; + obj.general_institution = p.Results.general_institution; + obj.general_intracellular_ephys = p.Results.general_intracellular_ephys; + obj.general_intracellular_ephys_experimental_conditions = p.Results.general_intracellular_ephys_experimental_conditions; + obj.general_intracellular_ephys_filtering = p.Results.general_intracellular_ephys_filtering; + obj.general_intracellular_ephys_intracellular_recordings = p.Results.general_intracellular_ephys_intracellular_recordings; + 
obj.general_intracellular_ephys_repetitions = p.Results.general_intracellular_ephys_repetitions; + obj.general_intracellular_ephys_sequential_recordings = p.Results.general_intracellular_ephys_sequential_recordings; + obj.general_intracellular_ephys_simultaneous_recordings = p.Results.general_intracellular_ephys_simultaneous_recordings; + obj.general_intracellular_ephys_sweep_table = p.Results.general_intracellular_ephys_sweep_table; + obj.general_keywords = p.Results.general_keywords; + obj.general_lab = p.Results.general_lab; + obj.general_notes = p.Results.general_notes; + obj.general_optogenetics = p.Results.general_optogenetics; + obj.general_optophysiology = p.Results.general_optophysiology; + obj.general_pharmacology = p.Results.general_pharmacology; + obj.general_protocol = p.Results.general_protocol; + obj.general_related_publications = p.Results.general_related_publications; + obj.general_session_id = p.Results.general_session_id; + obj.general_slices = p.Results.general_slices; + obj.general_source_script = p.Results.general_source_script; + obj.general_source_script_file_name = p.Results.general_source_script_file_name; + obj.general_stimulus = p.Results.general_stimulus; + obj.general_subject = p.Results.general_subject; + obj.general_surgery = p.Results.general_surgery; + obj.general_virus = p.Results.general_virus; + obj.identifier = p.Results.identifier; + obj.intervals = p.Results.intervals; + obj.intervals_epochs = p.Results.intervals_epochs; + obj.intervals_invalid_times = p.Results.intervals_invalid_times; + obj.intervals_trials = p.Results.intervals_trials; + obj.nwb_version = p.Results.nwb_version; + obj.processing = p.Results.processing; + obj.scratch = p.Results.scratch; + obj.session_description = p.Results.session_description; + obj.session_start_time = p.Results.session_start_time; + obj.stimulus_presentation = p.Results.stimulus_presentation; + obj.stimulus_templates = p.Results.stimulus_templates; + obj.timestamps_reference_time = 
p.Results.timestamps_reference_time; + obj.units = p.Results.units; + if strcmp(class(obj), 'types.core.NWBFile') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.acquisition(obj, val) + obj.acquisition = obj.validate_acquisition(val); + end + function set.analysis(obj, val) + obj.analysis = obj.validate_analysis(val); + end + function set.file_create_date(obj, val) + obj.file_create_date = obj.validate_file_create_date(val); + end + function set.general(obj, val) + obj.general = obj.validate_general(val); + end + function set.general_data_collection(obj, val) + obj.general_data_collection = obj.validate_general_data_collection(val); + end + function set.general_devices(obj, val) + obj.general_devices = obj.validate_general_devices(val); + end + function set.general_experiment_description(obj, val) + obj.general_experiment_description = obj.validate_general_experiment_description(val); + end + function set.general_experimenter(obj, val) + obj.general_experimenter = obj.validate_general_experimenter(val); + end + function set.general_extracellular_ephys(obj, val) + obj.general_extracellular_ephys = obj.validate_general_extracellular_ephys(val); + end + function set.general_extracellular_ephys_electrodes(obj, val) + obj.general_extracellular_ephys_electrodes = obj.validate_general_extracellular_ephys_electrodes(val); + end + function set.general_institution(obj, val) + obj.general_institution = obj.validate_general_institution(val); + end + function set.general_intracellular_ephys(obj, val) + obj.general_intracellular_ephys = obj.validate_general_intracellular_ephys(val); + end + function set.general_intracellular_ephys_experimental_conditions(obj, val) + obj.general_intracellular_ephys_experimental_conditions = obj.validate_general_intracellular_ephys_experimental_conditions(val); + end + function set.general_intracellular_ephys_filtering(obj, 
val) + obj.general_intracellular_ephys_filtering = obj.validate_general_intracellular_ephys_filtering(val); + end + function set.general_intracellular_ephys_intracellular_recordings(obj, val) + obj.general_intracellular_ephys_intracellular_recordings = obj.validate_general_intracellular_ephys_intracellular_recordings(val); + end + function set.general_intracellular_ephys_repetitions(obj, val) + obj.general_intracellular_ephys_repetitions = obj.validate_general_intracellular_ephys_repetitions(val); + end + function set.general_intracellular_ephys_sequential_recordings(obj, val) + obj.general_intracellular_ephys_sequential_recordings = obj.validate_general_intracellular_ephys_sequential_recordings(val); + end + function set.general_intracellular_ephys_simultaneous_recordings(obj, val) + obj.general_intracellular_ephys_simultaneous_recordings = obj.validate_general_intracellular_ephys_simultaneous_recordings(val); + end + function set.general_intracellular_ephys_sweep_table(obj, val) + obj.general_intracellular_ephys_sweep_table = obj.validate_general_intracellular_ephys_sweep_table(val); + end + function set.general_keywords(obj, val) + obj.general_keywords = obj.validate_general_keywords(val); + end + function set.general_lab(obj, val) + obj.general_lab = obj.validate_general_lab(val); + end + function set.general_notes(obj, val) + obj.general_notes = obj.validate_general_notes(val); + end + function set.general_optogenetics(obj, val) + obj.general_optogenetics = obj.validate_general_optogenetics(val); + end + function set.general_optophysiology(obj, val) + obj.general_optophysiology = obj.validate_general_optophysiology(val); + end + function set.general_pharmacology(obj, val) + obj.general_pharmacology = obj.validate_general_pharmacology(val); + end + function set.general_protocol(obj, val) + obj.general_protocol = obj.validate_general_protocol(val); + end + function set.general_related_publications(obj, val) + obj.general_related_publications = 
obj.validate_general_related_publications(val); + end + function set.general_session_id(obj, val) + obj.general_session_id = obj.validate_general_session_id(val); + end + function set.general_slices(obj, val) + obj.general_slices = obj.validate_general_slices(val); + end + function set.general_source_script(obj, val) + obj.general_source_script = obj.validate_general_source_script(val); + end + function set.general_source_script_file_name(obj, val) + obj.general_source_script_file_name = obj.validate_general_source_script_file_name(val); + end + function set.general_stimulus(obj, val) + obj.general_stimulus = obj.validate_general_stimulus(val); + end + function set.general_subject(obj, val) + obj.general_subject = obj.validate_general_subject(val); + end + function set.general_surgery(obj, val) + obj.general_surgery = obj.validate_general_surgery(val); + end + function set.general_virus(obj, val) + obj.general_virus = obj.validate_general_virus(val); + end + function set.identifier(obj, val) + obj.identifier = obj.validate_identifier(val); + end + function set.intervals(obj, val) + obj.intervals = obj.validate_intervals(val); + end + function set.intervals_epochs(obj, val) + obj.intervals_epochs = obj.validate_intervals_epochs(val); + end + function set.intervals_invalid_times(obj, val) + obj.intervals_invalid_times = obj.validate_intervals_invalid_times(val); + end + function set.intervals_trials(obj, val) + obj.intervals_trials = obj.validate_intervals_trials(val); + end + function set.processing(obj, val) + obj.processing = obj.validate_processing(val); + end + function set.scratch(obj, val) + obj.scratch = obj.validate_scratch(val); + end + function set.session_description(obj, val) + obj.session_description = obj.validate_session_description(val); + end + function set.session_start_time(obj, val) + obj.session_start_time = obj.validate_session_start_time(val); + end + function set.stimulus_presentation(obj, val) + obj.stimulus_presentation = 
obj.validate_stimulus_presentation(val); + end + function set.stimulus_templates(obj, val) + obj.stimulus_templates = obj.validate_stimulus_templates(val); + end + function set.timestamps_reference_time(obj, val) + obj.timestamps_reference_time = obj.validate_timestamps_reference_time(val); + end + function set.units(obj, val) + obj.units = obj.validate_units(val); + end + %% VALIDATORS + + function val = validate_acquisition(obj, val) + constrained = {'types.hdmf_common.DynamicTable', 'types.core.NWBDataInterface'}; + types.util.checkSet('acquisition', struct(), constrained, val); + end + function val = validate_analysis(obj, val) + constrained = {'types.hdmf_common.DynamicTable', 'types.core.NWBContainer'}; + types.util.checkSet('analysis', struct(), constrained, val); + end + function val = validate_file_create_date(obj, val) + val = types.util.checkDtype('file_create_date', 'datetime', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general(obj, val) + namedprops = struct(); + constrained = {'types.core.LabMetaData'}; + types.util.checkSet('general', namedprops, constrained, val); + end + function val = validate_general_data_collection(obj, val) + val = types.util.checkDtype('general_data_collection', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_devices(obj, val) + namedprops = struct(); + constrained = {'types.core.Device'}; + 
types.util.checkSet('general_devices', namedprops, constrained, val); + end + function val = validate_general_experiment_description(obj, val) + val = types.util.checkDtype('general_experiment_description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_experimenter(obj, val) + val = types.util.checkDtype('general_experimenter', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_extracellular_ephys(obj, val) + namedprops = struct(); + constrained = {'types.core.ElectrodeGroup'}; + types.util.checkSet('general_extracellular_ephys', namedprops, constrained, val); + end + function val = validate_general_extracellular_ephys_electrodes(obj, val) + val = types.util.checkDtype('general_extracellular_ephys_electrodes', 'types.hdmf_common.DynamicTable', val); + end + function val = validate_general_institution(obj, val) + val = types.util.checkDtype('general_institution', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_intracellular_ephys(obj, val) + namedprops = struct(); + constrained = {'types.core.IntracellularElectrode'}; 
+ types.util.checkSet('general_intracellular_ephys', namedprops, constrained, val); + end + function val = validate_general_intracellular_ephys_experimental_conditions(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_experimental_conditions', 'types.core.ExperimentalConditionsTable', val); + end + function val = validate_general_intracellular_ephys_filtering(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_filtering', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_intracellular_ephys_intracellular_recordings(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_intracellular_recordings', 'types.core.IntracellularRecordingsTable', val); + end + function val = validate_general_intracellular_ephys_repetitions(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_repetitions', 'types.core.RepetitionsTable', val); + end + function val = validate_general_intracellular_ephys_sequential_recordings(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_sequential_recordings', 'types.core.SequentialRecordingsTable', val); + end + function val = validate_general_intracellular_ephys_simultaneous_recordings(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_simultaneous_recordings', 'types.core.SimultaneousRecordingsTable', val); + end + function val = validate_general_intracellular_ephys_sweep_table(obj, val) + val = types.util.checkDtype('general_intracellular_ephys_sweep_table', 'types.core.SweepTable', val); + end + function val = validate_general_keywords(obj, val) + val = types.util.checkDtype('general_keywords', 'char', val); + if isa(val, 
'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_lab(obj, val) + val = types.util.checkDtype('general_lab', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_notes(obj, val) + val = types.util.checkDtype('general_notes', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_optogenetics(obj, val) + namedprops = struct(); + constrained = {'types.core.OptogeneticStimulusSite'}; + types.util.checkSet('general_optogenetics', namedprops, constrained, val); + end + function val = validate_general_optophysiology(obj, val) + namedprops = struct(); + constrained = {'types.core.ImagingPlane'}; + types.util.checkSet('general_optophysiology', namedprops, constrained, val); + end + function val = validate_general_pharmacology(obj, val) + val = types.util.checkDtype('general_pharmacology', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = 
{[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_protocol(obj, val) + val = types.util.checkDtype('general_protocol', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_related_publications(obj, val) + val = types.util.checkDtype('general_related_publications', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_session_id(obj, val) + val = types.util.checkDtype('general_session_id', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_slices(obj, val) + val = types.util.checkDtype('general_slices', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_source_script(obj, val) + val = types.util.checkDtype('general_source_script', 'char', val); + if isa(val, 
'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_source_script_file_name(obj, val) + val = types.util.checkDtype('general_source_script_file_name', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_stimulus(obj, val) + val = types.util.checkDtype('general_stimulus', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_subject(obj, val) + val = types.util.checkDtype('general_subject', 'types.core.Subject', val); + end + function val = validate_general_surgery(obj, val) + val = types.util.checkDtype('general_surgery', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_general_virus(obj, val) + val = types.util.checkDtype('general_virus', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = 
val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_identifier(obj, val) + val = types.util.checkDtype('identifier', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_intervals(obj, val) + namedprops = struct(); + constrained = {'types.core.TimeIntervals'}; + types.util.checkSet('intervals', namedprops, constrained, val); + end + function val = validate_intervals_epochs(obj, val) + val = types.util.checkDtype('intervals_epochs', 'types.core.TimeIntervals', val); + end + function val = validate_intervals_invalid_times(obj, val) + val = types.util.checkDtype('intervals_invalid_times', 'types.core.TimeIntervals', val); + end + function val = validate_intervals_trials(obj, val) + val = types.util.checkDtype('intervals_trials', 'types.core.TimeIntervals', val); + end + function val = validate_processing(obj, val) + namedprops = struct(); + constrained = {'types.core.ProcessingModule'}; + types.util.checkSet('processing', namedprops, constrained, val); + end + function val = validate_scratch(obj, val) + constrained = {'types.hdmf_common.DynamicTable', 'types.core.NWBContainer', 'types.core.ScratchData'}; + types.util.checkSet('scratch', struct(), constrained, val); + end + function val = validate_session_description(obj, val) + val = types.util.checkDtype('session_description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif 
ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_session_start_time(obj, val) + val = types.util.checkDtype('session_start_time', 'datetime', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_stimulus_presentation(obj, val) + namedprops = struct(); + constrained = {'types.core.TimeSeries'}; + types.util.checkSet('stimulus_presentation', namedprops, constrained, val); + end + function val = validate_stimulus_templates(obj, val) + constrained = {'types.core.Images', 'types.core.TimeSeries'}; + types.util.checkSet('stimulus_templates', struct(), constrained, val); + end + function val = validate_timestamps_reference_time(obj, val) + val = types.util.checkDtype('timestamps_reference_time', 'datetime', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_units(obj, val) + val = types.util.checkDtype('units', 'types.core.Units', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + fullpath = ''; + refs = obj.acquisition.export(fid, [fullpath '/acquisition'], refs); + refs = obj.analysis.export(fid, [fullpath '/analysis'], refs); + if startsWith(class(obj.file_create_date), 'types.untyped.') + refs = 
obj.file_create_date.export(fid, [fullpath '/file_create_date'], refs); + elseif ~isempty(obj.file_create_date) + io.writeDataset(fid, [fullpath '/file_create_date'], obj.file_create_date, 'forceChunking', 'forceArray'); + end + refs = obj.general.export(fid, [fullpath '/general'], refs); + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_data_collection) + if startsWith(class(obj.general_data_collection), 'types.untyped.') + refs = obj.general_data_collection.export(fid, [fullpath '/general/data_collection'], refs); + elseif ~isempty(obj.general_data_collection) + io.writeDataset(fid, [fullpath '/general/data_collection'], obj.general_data_collection); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_devices) + refs = obj.general_devices.export(fid, [fullpath '/general/devices'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_experiment_description) + if startsWith(class(obj.general_experiment_description), 'types.untyped.') + refs = obj.general_experiment_description.export(fid, [fullpath '/general/experiment_description'], refs); + elseif ~isempty(obj.general_experiment_description) + io.writeDataset(fid, [fullpath '/general/experiment_description'], obj.general_experiment_description); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_experimenter) + if startsWith(class(obj.general_experimenter), 'types.untyped.') + refs = obj.general_experimenter.export(fid, [fullpath '/general/experimenter'], refs); + elseif ~isempty(obj.general_experimenter) + io.writeDataset(fid, [fullpath '/general/experimenter'], obj.general_experimenter, 'forceArray'); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_extracellular_ephys) + refs = obj.general_extracellular_ephys.export(fid, [fullpath '/general/extracellular_ephys'], refs); + end + io.writeGroup(fid, [fullpath '/general/extracellular_ephys']); + if 
~isempty(obj.general_extracellular_ephys_electrodes) + refs = obj.general_extracellular_ephys_electrodes.export(fid, [fullpath '/general/extracellular_ephys/electrodes'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_institution) + if startsWith(class(obj.general_institution), 'types.untyped.') + refs = obj.general_institution.export(fid, [fullpath '/general/institution'], refs); + elseif ~isempty(obj.general_institution) + io.writeDataset(fid, [fullpath '/general/institution'], obj.general_institution); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_intracellular_ephys) + refs = obj.general_intracellular_ephys.export(fid, [fullpath '/general/intracellular_ephys'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_experimental_conditions) + refs = obj.general_intracellular_ephys_experimental_conditions.export(fid, [fullpath '/general/intracellular_ephys/experimental_conditions'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_filtering) + if startsWith(class(obj.general_intracellular_ephys_filtering), 'types.untyped.') + refs = obj.general_intracellular_ephys_filtering.export(fid, [fullpath '/general/intracellular_ephys/filtering'], refs); + elseif ~isempty(obj.general_intracellular_ephys_filtering) + io.writeDataset(fid, [fullpath '/general/intracellular_ephys/filtering'], obj.general_intracellular_ephys_filtering); + end + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_intracellular_recordings) + refs = obj.general_intracellular_ephys_intracellular_recordings.export(fid, [fullpath '/general/intracellular_ephys/intracellular_recordings'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_repetitions) + 
refs = obj.general_intracellular_ephys_repetitions.export(fid, [fullpath '/general/intracellular_ephys/repetitions'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_sequential_recordings) + refs = obj.general_intracellular_ephys_sequential_recordings.export(fid, [fullpath '/general/intracellular_ephys/sequential_recordings'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_simultaneous_recordings) + refs = obj.general_intracellular_ephys_simultaneous_recordings.export(fid, [fullpath '/general/intracellular_ephys/simultaneous_recordings'], refs); + end + io.writeGroup(fid, [fullpath '/general/intracellular_ephys']); + if ~isempty(obj.general_intracellular_ephys_sweep_table) + refs = obj.general_intracellular_ephys_sweep_table.export(fid, [fullpath '/general/intracellular_ephys/sweep_table'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_keywords) + if startsWith(class(obj.general_keywords), 'types.untyped.') + refs = obj.general_keywords.export(fid, [fullpath '/general/keywords'], refs); + elseif ~isempty(obj.general_keywords) + io.writeDataset(fid, [fullpath '/general/keywords'], obj.general_keywords, 'forceArray'); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_lab) + if startsWith(class(obj.general_lab), 'types.untyped.') + refs = obj.general_lab.export(fid, [fullpath '/general/lab'], refs); + elseif ~isempty(obj.general_lab) + io.writeDataset(fid, [fullpath '/general/lab'], obj.general_lab); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_notes) + if startsWith(class(obj.general_notes), 'types.untyped.') + refs = obj.general_notes.export(fid, [fullpath '/general/notes'], refs); + elseif ~isempty(obj.general_notes) + io.writeDataset(fid, [fullpath '/general/notes'], obj.general_notes); + end + end + 
io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_optogenetics) + refs = obj.general_optogenetics.export(fid, [fullpath '/general/optogenetics'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_optophysiology) + refs = obj.general_optophysiology.export(fid, [fullpath '/general/optophysiology'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_pharmacology) + if startsWith(class(obj.general_pharmacology), 'types.untyped.') + refs = obj.general_pharmacology.export(fid, [fullpath '/general/pharmacology'], refs); + elseif ~isempty(obj.general_pharmacology) + io.writeDataset(fid, [fullpath '/general/pharmacology'], obj.general_pharmacology); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_protocol) + if startsWith(class(obj.general_protocol), 'types.untyped.') + refs = obj.general_protocol.export(fid, [fullpath '/general/protocol'], refs); + elseif ~isempty(obj.general_protocol) + io.writeDataset(fid, [fullpath '/general/protocol'], obj.general_protocol); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_related_publications) + if startsWith(class(obj.general_related_publications), 'types.untyped.') + refs = obj.general_related_publications.export(fid, [fullpath '/general/related_publications'], refs); + elseif ~isempty(obj.general_related_publications) + io.writeDataset(fid, [fullpath '/general/related_publications'], obj.general_related_publications, 'forceArray'); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_session_id) + if startsWith(class(obj.general_session_id), 'types.untyped.') + refs = obj.general_session_id.export(fid, [fullpath '/general/session_id'], refs); + elseif ~isempty(obj.general_session_id) + io.writeDataset(fid, [fullpath '/general/session_id'], obj.general_session_id); + end + end + io.writeGroup(fid, [fullpath '/general']); + if 
~isempty(obj.general_slices) + if startsWith(class(obj.general_slices), 'types.untyped.') + refs = obj.general_slices.export(fid, [fullpath '/general/slices'], refs); + elseif ~isempty(obj.general_slices) + io.writeDataset(fid, [fullpath '/general/slices'], obj.general_slices); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_source_script) + if startsWith(class(obj.general_source_script), 'types.untyped.') + refs = obj.general_source_script.export(fid, [fullpath '/general/source_script'], refs); + elseif ~isempty(obj.general_source_script) + io.writeDataset(fid, [fullpath '/general/source_script'], obj.general_source_script); + end + end + if ~isempty(obj.general_source_script) && ~isa(obj.general_source_script, 'types.untyped.SoftLink') && ~isa(obj.general_source_script, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/general/source_script/file_name'], obj.general_source_script_file_name); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_stimulus) + if startsWith(class(obj.general_stimulus), 'types.untyped.') + refs = obj.general_stimulus.export(fid, [fullpath '/general/stimulus'], refs); + elseif ~isempty(obj.general_stimulus) + io.writeDataset(fid, [fullpath '/general/stimulus'], obj.general_stimulus); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_subject) + refs = obj.general_subject.export(fid, [fullpath '/general/subject'], refs); + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_surgery) + if startsWith(class(obj.general_surgery), 'types.untyped.') + refs = obj.general_surgery.export(fid, [fullpath '/general/surgery'], refs); + elseif ~isempty(obj.general_surgery) + io.writeDataset(fid, [fullpath '/general/surgery'], obj.general_surgery); + end + end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_virus) + if startsWith(class(obj.general_virus), 'types.untyped.') + refs = 
obj.general_virus.export(fid, [fullpath '/general/virus'], refs); + elseif ~isempty(obj.general_virus) + io.writeDataset(fid, [fullpath '/general/virus'], obj.general_virus); + end + end + if startsWith(class(obj.identifier), 'types.untyped.') + refs = obj.identifier.export(fid, [fullpath '/identifier'], refs); + elseif ~isempty(obj.identifier) + io.writeDataset(fid, [fullpath '/identifier'], obj.identifier); + end + if ~isempty(obj.intervals) + refs = obj.intervals.export(fid, [fullpath '/intervals'], refs); + end + io.writeGroup(fid, [fullpath '/intervals']); + if ~isempty(obj.intervals_epochs) + refs = obj.intervals_epochs.export(fid, [fullpath '/intervals/epochs'], refs); + end + io.writeGroup(fid, [fullpath '/intervals']); + if ~isempty(obj.intervals_invalid_times) + refs = obj.intervals_invalid_times.export(fid, [fullpath '/intervals/invalid_times'], refs); + end + io.writeGroup(fid, [fullpath '/intervals']); + if ~isempty(obj.intervals_trials) + refs = obj.intervals_trials.export(fid, [fullpath '/intervals/trials'], refs); + end + io.writeAttribute(fid, [fullpath '/nwb_version'], obj.nwb_version); + refs = obj.processing.export(fid, [fullpath '/processing'], refs); + if ~isempty(obj.scratch) + refs = obj.scratch.export(fid, [fullpath '/scratch'], refs); + end + if startsWith(class(obj.session_description), 'types.untyped.') + refs = obj.session_description.export(fid, [fullpath '/session_description'], refs); + elseif ~isempty(obj.session_description) + io.writeDataset(fid, [fullpath '/session_description'], obj.session_description); + end + if startsWith(class(obj.session_start_time), 'types.untyped.') + refs = obj.session_start_time.export(fid, [fullpath '/session_start_time'], refs); + elseif ~isempty(obj.session_start_time) + io.writeDataset(fid, [fullpath '/session_start_time'], obj.session_start_time); + end + io.writeGroup(fid, [fullpath '/stimulus']); + refs = obj.stimulus_presentation.export(fid, [fullpath '/stimulus/presentation'], refs); + 
io.writeGroup(fid, [fullpath '/stimulus']); + refs = obj.stimulus_templates.export(fid, [fullpath '/stimulus/templates'], refs); + if startsWith(class(obj.timestamps_reference_time), 'types.untyped.') + refs = obj.timestamps_reference_time.export(fid, [fullpath '/timestamps_reference_time'], refs); + elseif ~isempty(obj.timestamps_reference_time) + io.writeDataset(fid, [fullpath '/timestamps_reference_time'], obj.timestamps_reference_time); + end + if ~isempty(obj.units) + refs = obj.units.export(fid, [fullpath '/units'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/OnePhotonSeries.m b/+types/+core/OnePhotonSeries.m new file mode 100644 index 00000000..ed410612 --- /dev/null +++ b/+types/+core/OnePhotonSeries.m @@ -0,0 +1,209 @@ +classdef OnePhotonSeries < types.core.ImageSeries & types.untyped.GroupClass +% ONEPHOTONSERIES Image stack recorded over time from 1-photon microscope. + + +% OPTIONAL PROPERTIES +properties + binning; % (uint8) Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + exposure_time; % (single) Exposure time of the sample; often the inverse of the frequency. + imaging_plane; % ImagingPlane + intensity; % (single) Intensity of the excitation in mW/mm^2, if known. + pmt_gain; % (single) Photomultiplier gain. + power; % (single) Power of the excitation in mW, if known. + scan_line_rate; % (single) Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. 
+end + +methods + function obj = OnePhotonSeries(varargin) + % ONEPHOTONSERIES Constructor for OnePhotonSeries + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'binning',[]); + addParameter(p, 'exposure_time',[]); + addParameter(p, 'imaging_plane',[]); + addParameter(p, 'intensity',[]); + addParameter(p, 'pmt_gain',[]); + addParameter(p, 'power',[]); + addParameter(p, 'scan_line_rate',[]); + misc.parseSkipInvalidName(p, varargin); + obj.binning = p.Results.binning; + obj.exposure_time = p.Results.exposure_time; + obj.imaging_plane = p.Results.imaging_plane; + obj.intensity = p.Results.intensity; + obj.pmt_gain = p.Results.pmt_gain; + obj.power = p.Results.power; + obj.scan_line_rate = p.Results.scan_line_rate; + if strcmp(class(obj), 'types.core.OnePhotonSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.binning(obj, val) + obj.binning = obj.validate_binning(val); + end + function set.exposure_time(obj, val) + obj.exposure_time = obj.validate_exposure_time(val); + end + function set.imaging_plane(obj, val) + obj.imaging_plane = obj.validate_imaging_plane(val); + end + function set.intensity(obj, val) + obj.intensity = obj.validate_intensity(val); + end + function set.pmt_gain(obj, val) + obj.pmt_gain = obj.validate_pmt_gain(val); + end + function set.power(obj, val) + obj.power = obj.validate_power(val); + end + function set.scan_line_rate(obj, val) + obj.scan_line_rate = obj.validate_scan_line_rate(val); + end + %% VALIDATORS + + function val = validate_binning(obj, val) + val = types.util.checkDtype('binning', 'uint8', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + 
valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_exposure_time(obj, val) + val = types.util.checkDtype('exposure_time', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_imaging_plane(obj, val) + val = types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val); + end + function val = validate_intensity(obj, val) + val = types.util.checkDtype('intensity', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_pmt_gain(obj, val) + val = types.util.checkDtype('pmt_gain', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_power(obj, val) + val = types.util.checkDtype('power', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = 
validate_scan_line_rate(obj, val) + val = types.util.checkDtype('scan_line_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.binning) + io.writeAttribute(fid, [fullpath '/binning'], obj.binning); + end + if ~isempty(obj.exposure_time) + io.writeAttribute(fid, [fullpath '/exposure_time'], obj.exposure_time); + end + refs = obj.imaging_plane.export(fid, [fullpath '/imaging_plane'], refs); + if ~isempty(obj.intensity) + io.writeAttribute(fid, [fullpath '/intensity'], obj.intensity); + end + if ~isempty(obj.pmt_gain) + io.writeAttribute(fid, [fullpath '/pmt_gain'], obj.pmt_gain); + end + if ~isempty(obj.power) + io.writeAttribute(fid, [fullpath '/power'], obj.power); + end + if ~isempty(obj.scan_line_rate) + io.writeAttribute(fid, [fullpath '/scan_line_rate'], obj.scan_line_rate); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/OpticalChannel.m b/+types/+core/OpticalChannel.m new file mode 100644 index 00000000..038b43a0 --- /dev/null +++ b/+types/+core/OpticalChannel.m @@ -0,0 +1,95 @@ +classdef OpticalChannel < types.core.NWBContainer & types.untyped.GroupClass +% OPTICALCHANNEL An optical channel used to record from an imaging plane. + + +% REQUIRED PROPERTIES +properties + description; % REQUIRED (char) Description or other notes about the channel. + emission_lambda; % REQUIRED (single) Emission wavelength for channel, in nm. 
+end + +methods + function obj = OpticalChannel(varargin) + % OPTICALCHANNEL Constructor for OpticalChannel + obj = obj@types.core.NWBContainer(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + addParameter(p, 'emission_lambda',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + obj.emission_lambda = p.Results.emission_lambda; + if strcmp(class(obj), 'types.core.OpticalChannel') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.emission_lambda(obj, val) + obj.emission_lambda = obj.validate_emission_lambda(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_emission_lambda(obj, val) + val = types.util.checkDtype('emission_lambda', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.description), 
'types.untyped.') + refs = obj.description.export(fid, [fullpath '/description'], refs); + elseif ~isempty(obj.description) + io.writeDataset(fid, [fullpath '/description'], obj.description); + end + if startsWith(class(obj.emission_lambda), 'types.untyped.') + refs = obj.emission_lambda.export(fid, [fullpath '/emission_lambda'], refs); + elseif ~isempty(obj.emission_lambda) + io.writeDataset(fid, [fullpath '/emission_lambda'], obj.emission_lambda); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/OpticalSeries.m b/+types/+core/OpticalSeries.m new file mode 100644 index 00000000..62a1fe82 --- /dev/null +++ b/+types/+core/OpticalSeries.m @@ -0,0 +1,150 @@ +classdef OpticalSeries < types.core.ImageSeries & types.untyped.GroupClass +% OPTICALSERIES Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. + + +% OPTIONAL PROPERTIES +properties + distance; % (single) Distance from camera/monitor to target/eye. + field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. + orientation; % (char) Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. 
+end + +methods + function obj = OpticalSeries(varargin) + % OPTICALSERIES Constructor for OpticalSeries + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'distance',[]); + addParameter(p, 'field_of_view',[]); + addParameter(p, 'orientation',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.distance = p.Results.distance; + obj.field_of_view = p.Results.field_of_view; + obj.orientation = p.Results.orientation; + if strcmp(class(obj), 'types.core.OpticalSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.distance(obj, val) + obj.distance = obj.validate_distance(val); + end + function set.field_of_view(obj, val) + obj.field_of_view = obj.validate_field_of_view(val); + end + function set.orientation(obj, val) + obj.orientation = obj.validate_orientation(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3,Inf,Inf,Inf], [Inf,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_distance(obj, val) + val = types.util.checkDtype('distance', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = 
validate_field_of_view(obj, val) + val = types.util.checkDtype('field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_orientation(obj, val) + val = types.util.checkDtype('orientation', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.distance) + if startsWith(class(obj.distance), 'types.untyped.') + refs = obj.distance.export(fid, [fullpath '/distance'], refs); + elseif ~isempty(obj.distance) + io.writeDataset(fid, [fullpath '/distance'], obj.distance); + end + end + if ~isempty(obj.field_of_view) + if startsWith(class(obj.field_of_view), 'types.untyped.') + refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); + elseif ~isempty(obj.field_of_view) + io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 'forceArray'); + end + end + if ~isempty(obj.orientation) + if startsWith(class(obj.orientation), 'types.untyped.') + refs = obj.orientation.export(fid, [fullpath '/orientation'], refs); + elseif ~isempty(obj.orientation) + io.writeDataset(fid, [fullpath '/orientation'], obj.orientation); + end + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/OptogeneticSeries.m b/+types/+core/OptogeneticSeries.m new file 
mode 100644
index 00000000..837bbbc4
--- /dev/null
+++ b/+types/+core/OptogeneticSeries.m
@@ -0,0 +1,70 @@
classdef OptogeneticSeries < types.core.TimeSeries & types.untyped.GroupClass
% OPTOGENETICSERIES An optogenetic stimulus.
%
% NOTE(review): this file follows the MatNWB generated-type template
% (presumably produced by generateCore) — prefer regenerating over
% hand-editing; TODO confirm with the generator pipeline.


% OPTIONAL PROPERTIES
properties
    site; % OptogeneticStimulusSite
end

methods
    function obj = OptogeneticSeries(varargin)
        % OPTOGENETICSERIES Constructor for OptogeneticSeries
        %
        % Accepts name/value pairs; recognized names here are
        % 'data', 'data_unit', and 'site'. Unmatched names are kept
        % (KeepUnmatched) and handled by the TimeSeries superclass.
        %
        % Seed the fixed unit 'watts' ahead of caller arguments before
        % delegating to the TimeSeries constructor.
        varargin = [{'data_unit' 'watts'} varargin];
        obj = obj@types.core.TimeSeries(varargin{:});
        
        
        p = inputParser;
        p.KeepUnmatched = true;
        p.PartialMatching = false; % require exact parameter names
        p.StructExpand = false;
        addParameter(p, 'data',[]);
        addParameter(p, 'data_unit',[]);
        addParameter(p, 'site',[]);
        misc.parseSkipInvalidName(p, varargin);
        % Assigning through the properties triggers the set.* methods,
        % so each value below passes through its validator.
        obj.data = p.Results.data;
        obj.data_unit = p.Results.data_unit;
        obj.site = p.Results.site;
        % Only run the unset-argument check when constructing this exact
        % class; subclasses perform their own check at their own level.
        if strcmp(class(obj), 'types.core.OptogeneticSeries')
            cellStringArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(cellStringArguments));
        end
    end
    %% SETTERS
    function set.site(obj, val)
        % Validate before storing.
        obj.site = obj.validate_site(val);
    end
    %% VALIDATORS
    
    function val = validate_data(obj, val)
        % data must be numeric; shape must normalize to a 1-D vector
        % (validshapes {[Inf]} — any length along one dimension).
        val = types.util.checkDtype('data', 'numeric', val);
        if isa(val, 'types.untyped.DataStub')
            if 1 == val.ndims
                valsz = [val.dims 1];
            else
                valsz = val.dims;
            end
        elseif istable(val)
            valsz = [height(val) 1];
        elseif ischar(val)
            valsz = [size(val, 1) 1];
        else
            valsz = size(val);
        end
        validshapes = {[Inf]};
        types.util.checkDims(valsz, validshapes);
    end
    function val = validate_site(obj, val)
        % site must be (or resolve to) a types.core.OptogeneticStimulusSite.
        val = types.util.checkDtype('site', 'types.core.OptogeneticStimulusSite', val);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        % Write this group into the HDF5 file at fullpath.
        % Superclass export handles all inherited TimeSeries members;
        % a fullpath entry in refs signals the export was deferred
        % (unresolved reference), so stop here in that case.
        refs = export@types.core.TimeSeries(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
        refs = obj.site.export(fid, [fullpath '/site'], refs);
    end
end

end
\ No newline at end of file
diff
--git a/+types/+core/OptogeneticStimulusSite.m b/+types/+core/OptogeneticStimulusSite.m
new file mode 100644
index 00000000..f463c067
--- /dev/null
+++ b/+types/+core/OptogeneticStimulusSite.m
@@ -0,0 +1,137 @@
classdef OptogeneticStimulusSite < types.core.NWBContainer & types.untyped.GroupClass
% OPTOGENETICSTIMULUSSITE A site of optogenetic stimulation.
%
% NOTE(review): this file follows the MatNWB generated-type template
% (presumably produced by generateCore) — prefer regenerating over
% hand-editing; TODO confirm with the generator pipeline.


% REQUIRED PROPERTIES
properties
    description; % REQUIRED (char) Description of stimulation site.
    excitation_lambda; % REQUIRED (single) Excitation wavelength, in nm.
    location; % REQUIRED (char) Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.
end
% OPTIONAL PROPERTIES
properties
    device; % Device
end

methods
    function obj = OptogeneticStimulusSite(varargin)
        % OPTOGENETICSTIMULUSSITE Constructor for OptogeneticStimulusSite
        %
        % Accepts name/value pairs; recognized names here are
        % 'description', 'device', 'excitation_lambda', and 'location'.
        % Unmatched names are kept (KeepUnmatched) and handled by the
        % NWBContainer superclass.
        obj = obj@types.core.NWBContainer(varargin{:});
        
        
        p = inputParser;
        p.KeepUnmatched = true;
        p.PartialMatching = false; % require exact parameter names
        p.StructExpand = false;
        addParameter(p, 'description',[]);
        addParameter(p, 'device',[]);
        addParameter(p, 'excitation_lambda',[]);
        addParameter(p, 'location',[]);
        misc.parseSkipInvalidName(p, varargin);
        % Assigning through the properties triggers the set.* methods,
        % so each value below passes through its validator.
        obj.description = p.Results.description;
        obj.device = p.Results.device;
        obj.excitation_lambda = p.Results.excitation_lambda;
        obj.location = p.Results.location;
        % Only run the unset-argument check when constructing this exact
        % class; subclasses perform their own check at their own level.
        if strcmp(class(obj), 'types.core.OptogeneticStimulusSite')
            cellStringArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(cellStringArguments));
        end
    end
    %% SETTERS
    function set.description(obj, val)
        % Validate before storing.
        obj.description = obj.validate_description(val);
    end
    function set.device(obj, val)
        % Validate before storing.
        obj.device = obj.validate_device(val);
    end
    function set.excitation_lambda(obj, val)
        % Validate before storing.
        obj.excitation_lambda = obj.validate_excitation_lambda(val);
    end
    function set.location(obj, val)
        % Validate before storing.
        obj.location = obj.validate_location(val);
    end
    %% VALIDATORS
    
    function val = validate_description(obj, val)
        % description must be char; shape must normalize to scalar ({[1]}).
        val = types.util.checkDtype('description', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if 1 == val.ndims
                valsz = [val.dims 1];
            else
                valsz = val.dims;
            end
        elseif istable(val)
            valsz = [height(val) 1];
        elseif ischar(val)
            valsz = [size(val, 1) 1];
        else
            valsz = size(val);
        end
        validshapes = {[1]};
        types.util.checkDims(valsz, validshapes);
    end
    function val = validate_device(obj, val)
        % device must be (or resolve to) a types.core.Device.
        val = types.util.checkDtype('device', 'types.core.Device', val);
    end
    function val = validate_excitation_lambda(obj, val)
        % excitation_lambda must be single; shape must normalize to scalar ({[1]}).
        val = types.util.checkDtype('excitation_lambda', 'single', val);
        if isa(val, 'types.untyped.DataStub')
            if 1 == val.ndims
                valsz = [val.dims 1];
            else
                valsz = val.dims;
            end
        elseif istable(val)
            valsz = [height(val) 1];
        elseif ischar(val)
            valsz = [size(val, 1) 1];
        else
            valsz = size(val);
        end
        validshapes = {[1]};
        types.util.checkDims(valsz, validshapes);
    end
    function val = validate_location(obj, val)
        % location must be char; shape must normalize to scalar ({[1]}).
        val = types.util.checkDtype('location', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if 1 == val.ndims
                valsz = [val.dims 1];
            else
                valsz = val.dims;
            end
        elseif istable(val)
            valsz = [height(val) 1];
        elseif ischar(val)
            valsz = [size(val, 1) 1];
        else
            valsz = size(val);
        end
        validshapes = {[1]};
        types.util.checkDims(valsz, validshapes);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        % Write this group into the HDF5 file at fullpath.
        % Superclass export handles all inherited NWBContainer members;
        % a fullpath entry in refs signals the export was deferred
        % (unresolved reference), so stop here in that case.
        refs = export@types.core.NWBContainer(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
        % Datasets wrapped in a types.untyped.* object (DataStub,
        % SoftLink, ...) export themselves; plain non-empty values are
        % written directly.
        if startsWith(class(obj.description), 'types.untyped.')
            refs = obj.description.export(fid, [fullpath '/description'], refs);
        elseif ~isempty(obj.description)
            io.writeDataset(fid, [fullpath '/description'], obj.description);
        end
        refs = obj.device.export(fid, [fullpath '/device'], refs);
        if startsWith(class(obj.excitation_lambda), 'types.untyped.')
            refs = obj.excitation_lambda.export(fid, [fullpath '/excitation_lambda'], refs);
        elseif ~isempty(obj.excitation_lambda)
            io.writeDataset(fid, [fullpath '/excitation_lambda'], obj.excitation_lambda);
        end
        if startsWith(class(obj.location), 'types.untyped.')
            refs = obj.location.export(fid, [fullpath '/location'], refs);
        elseif ~isempty(obj.location)
            io.writeDataset(fid, [fullpath '/location'], obj.location);
        end
    end
end

end
\ No newline at end of file
diff --git a/+types/+core/PatchClampSeries.m b/+types/+core/PatchClampSeries.m
new file mode 100644
index 00000000..28cc4446
--- /dev/null
+++ b/+types/+core/PatchClampSeries.m
@@ -0,0 +1,170 @@
classdef PatchClampSeries < types.core.TimeSeries & types.untyped.GroupClass
% PATCHCLAMPSERIES An abstract base class for patch-clamp data - stimulus or response, current or voltage.


% OPTIONAL PROPERTIES
properties
    electrode; % IntracellularElectrode
    gain; % (single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).
    stimulus_description; % (char) Protocol/stimulus name for this patch-clamp dataset.
    sweep_number; % (uint32) Sweep number, allows to group different PatchClampSeries together.
+end + +methods + function obj = PatchClampSeries(varargin) + % PATCHCLAMPSERIES Constructor for PatchClampSeries + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'electrode',[]); + addParameter(p, 'gain',[]); + addParameter(p, 'stimulus_description',[]); + addParameter(p, 'sweep_number',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.electrode = p.Results.electrode; + obj.gain = p.Results.gain; + obj.stimulus_description = p.Results.stimulus_description; + obj.sweep_number = p.Results.sweep_number; + if strcmp(class(obj), 'types.core.PatchClampSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.electrode(obj, val) + obj.electrode = obj.validate_electrode(val); + end + function set.gain(obj, val) + obj.gain = obj.validate_gain(val); + end + function set.stimulus_description(obj, val) + obj.stimulus_description = obj.validate_stimulus_description(val); + end + function set.sweep_number(obj, val) + obj.sweep_number = obj.validate_sweep_number(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = 
val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_electrode(obj, val) + val = types.util.checkDtype('electrode', 'types.core.IntracellularElectrode', val); + end + function val = validate_gain(obj, val) + val = types.util.checkDtype('gain', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_stimulus_description(obj, val) + val = types.util.checkDtype('stimulus_description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_sweep_number(obj, val) + val = types.util.checkDtype('sweep_number', 'uint32', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.electrode.export(fid, [fullpath '/electrode'], refs); + if ~isempty(obj.gain) + if startsWith(class(obj.gain), 'types.untyped.') + refs = 
obj.gain.export(fid, [fullpath '/gain'], refs); + elseif ~isempty(obj.gain) + io.writeDataset(fid, [fullpath '/gain'], obj.gain); + end + end + io.writeAttribute(fid, [fullpath '/stimulus_description'], obj.stimulus_description); + if ~isempty(obj.sweep_number) + io.writeAttribute(fid, [fullpath '/sweep_number'], obj.sweep_number); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/PlaneSegmentation.m b/+types/+core/PlaneSegmentation.m new file mode 100644 index 00000000..5fd95183 --- /dev/null +++ b/+types/+core/PlaneSegmentation.m @@ -0,0 +1,122 @@ +classdef PlaneSegmentation < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% PLANESEGMENTATION Results from image segmentation of a specific imaging plane. + + +% OPTIONAL PROPERTIES +properties + image_mask; % (VectorData) ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero. + imaging_plane; % ImagingPlane + pixel_mask; % (VectorData) Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + pixel_mask_index; % (VectorIndex) Index into pixel_mask. + reference_images; % (ImageSeries) One or more image stacks that the masks apply to (can be one-element stack). + voxel_mask; % (VectorData) Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + voxel_mask_index; % (VectorIndex) Index into voxel_mask. 
+end + +methods + function obj = PlaneSegmentation(varargin) + % PLANESEGMENTATION Constructor for PlaneSegmentation + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'image_mask',[]); + addParameter(p, 'imaging_plane',[]); + addParameter(p, 'pixel_mask',[]); + addParameter(p, 'pixel_mask_index',[]); + addParameter(p, 'reference_images',types.untyped.Set()); + addParameter(p, 'voxel_mask',[]); + addParameter(p, 'voxel_mask_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.image_mask = p.Results.image_mask; + obj.imaging_plane = p.Results.imaging_plane; + obj.pixel_mask = p.Results.pixel_mask; + obj.pixel_mask_index = p.Results.pixel_mask_index; + obj.reference_images = p.Results.reference_images; + obj.voxel_mask = p.Results.voxel_mask; + obj.voxel_mask_index = p.Results.voxel_mask_index; + if strcmp(class(obj), 'types.core.PlaneSegmentation') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.PlaneSegmentation') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.image_mask(obj, val) + obj.image_mask = obj.validate_image_mask(val); + end + function set.imaging_plane(obj, val) + obj.imaging_plane = obj.validate_imaging_plane(val); + end + function set.pixel_mask(obj, val) + obj.pixel_mask = obj.validate_pixel_mask(val); + end + function set.pixel_mask_index(obj, val) + obj.pixel_mask_index = obj.validate_pixel_mask_index(val); + end + function set.reference_images(obj, val) + obj.reference_images = obj.validate_reference_images(val); + end + function set.voxel_mask(obj, val) + obj.voxel_mask = obj.validate_voxel_mask(val); + end + function set.voxel_mask_index(obj, val) + obj.voxel_mask_index = obj.validate_voxel_mask_index(val); + end + %% VALIDATORS + + function val = 
validate_image_mask(obj, val) + val = types.util.checkDtype('image_mask', 'types.hdmf_common.VectorData', val); + end + function val = validate_imaging_plane(obj, val) + val = types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val); + end + function val = validate_pixel_mask(obj, val) + val = types.util.checkDtype('pixel_mask', 'types.hdmf_common.VectorData', val); + end + function val = validate_pixel_mask_index(obj, val) + val = types.util.checkDtype('pixel_mask_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_reference_images(obj, val) + namedprops = struct(); + constrained = {'types.core.ImageSeries'}; + types.util.checkSet('reference_images', namedprops, constrained, val); + end + function val = validate_voxel_mask(obj, val) + val = types.util.checkDtype('voxel_mask', 'types.hdmf_common.VectorData', val); + end + function val = validate_voxel_mask_index(obj, val) + val = types.util.checkDtype('voxel_mask_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.image_mask) + refs = obj.image_mask.export(fid, [fullpath '/image_mask'], refs); + end + refs = obj.imaging_plane.export(fid, [fullpath '/imaging_plane'], refs); + if ~isempty(obj.pixel_mask) + refs = obj.pixel_mask.export(fid, [fullpath '/pixel_mask'], refs); + end + if ~isempty(obj.pixel_mask_index) + refs = obj.pixel_mask_index.export(fid, [fullpath '/pixel_mask_index'], refs); + end + refs = obj.reference_images.export(fid, [fullpath '/reference_images'], refs); + if ~isempty(obj.voxel_mask) + refs = obj.voxel_mask.export(fid, [fullpath '/voxel_mask'], refs); + end + if ~isempty(obj.voxel_mask_index) + refs = obj.voxel_mask_index.export(fid, [fullpath '/voxel_mask_index'], refs); + end + end +end + +end \ No newline at end of file diff --git 
a/+types/+core/Position.m b/+types/+core/Position.m new file mode 100644 index 00000000..58b4ebaf --- /dev/null +++ b/+types/+core/Position.m @@ -0,0 +1,48 @@ +classdef Position < types.core.NWBDataInterface & types.untyped.GroupClass +% POSITION Position data, whether along the x, x/y or x/y/z axis. + + +% REQUIRED PROPERTIES +properties + spatialseries; % REQUIRED (SpatialSeries) SpatialSeries object containing position data. +end + +methods + function obj = Position(varargin) + % POSITION Constructor for Position + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.Position') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.spatialseries(obj, val) + obj.spatialseries = obj.validate_spatialseries(val); + end + %% VALIDATORS + + function val = validate_spatialseries(obj, val) + namedprops = struct(); + constrained = {'types.core.SpatialSeries'}; + types.util.checkSet('spatialseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.spatialseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ProcessingModule.m b/+types/+core/ProcessingModule.m new file mode 100644 index 00000000..c925acde --- /dev/null +++ b/+types/+core/ProcessingModule.m @@ -0,0 +1,89 @@ +classdef ProcessingModule < types.core.NWBContainer & types.untyped.GroupClass +% PROCESSINGMODULE A collection of processed 
data. + + +% OPTIONAL PROPERTIES +properties + description; % (char) Description of this collection of processed data. + dynamictable; % (DynamicTable) Tables stored in this collection. + nwbdatainterface; % (NWBDataInterface) Data objects stored in this collection. +end + +methods + function obj = ProcessingModule(varargin) + % PROCESSINGMODULE Constructor for ProcessingModule + obj = obj@types.core.NWBContainer(varargin{:}); + [obj.dynamictable, ivarargin] = types.util.parseConstrained(obj,'dynamictable', 'types.hdmf_common.DynamicTable', varargin{:}); + varargin(ivarargin) = []; + [obj.nwbdatainterface, ivarargin] = types.util.parseConstrained(obj,'nwbdatainterface', 'types.core.NWBDataInterface', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'description',[]); + misc.parseSkipInvalidName(p, varargin); + obj.description = p.Results.description; + if strcmp(class(obj), 'types.core.ProcessingModule') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.dynamictable(obj, val) + obj.dynamictable = obj.validate_dynamictable(val); + end + function set.nwbdatainterface(obj, val) + obj.nwbdatainterface = obj.validate_nwbdatainterface(val); + end + %% VALIDATORS + + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_dynamictable(obj, val) + namedprops 
= struct(); + constrained = {'types.hdmf_common.DynamicTable'}; + types.util.checkSet('dynamictable', namedprops, constrained, val); + end + function val = validate_nwbdatainterface(obj, val) + namedprops = struct(); + constrained = {'types.core.NWBDataInterface'}; + types.util.checkSet('nwbdatainterface', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/description'], obj.description); + if ~isempty(obj.dynamictable) + refs = obj.dynamictable.export(fid, fullpath, refs); + end + if ~isempty(obj.nwbdatainterface) + refs = obj.nwbdatainterface.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/PupilTracking.m b/+types/+core/PupilTracking.m new file mode 100644 index 00000000..f7018341 --- /dev/null +++ b/+types/+core/PupilTracking.m @@ -0,0 +1,48 @@ +classdef PupilTracking < types.core.NWBDataInterface & types.untyped.GroupClass +% PUPILTRACKING Eye-tracking data, representing pupil size. + + +% REQUIRED PROPERTIES +properties + timeseries; % REQUIRED (TimeSeries) TimeSeries object containing time series data on pupil size. 
+end + +methods + function obj = PupilTracking(varargin) + % PUPILTRACKING Constructor for PupilTracking + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.PupilTracking') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.timeseries(obj, val) + obj.timeseries = obj.validate_timeseries(val); + end + %% VALIDATORS + + function val = validate_timeseries(obj, val) + namedprops = struct(); + constrained = {'types.core.TimeSeries'}; + types.util.checkSet('timeseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.timeseries.export(fid, fullpath, refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/RGBAImage.m b/+types/+core/RGBAImage.m new file mode 100644 index 00000000..3ae47ea7 --- /dev/null +++ b/+types/+core/RGBAImage.m @@ -0,0 +1,40 @@ +classdef RGBAImage < types.core.Image & types.untyped.DatasetClass +% RGBAIMAGE A color image with transparency. 
+ + + +methods + function obj = RGBAImage(varargin) + % RGBAIMAGE Constructor for RGBAImage + obj = obj@types.core.Image(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.RGBAImage') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.Image(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/RGBImage.m b/+types/+core/RGBImage.m new file mode 100644 index 00000000..5aa72807 --- /dev/null +++ b/+types/+core/RGBImage.m @@ -0,0 +1,40 @@ +classdef RGBImage < types.core.Image & types.untyped.DatasetClass +% RGBIMAGE A color image. 
+ + + +methods + function obj = RGBImage(varargin) + % RGBIMAGE Constructor for RGBImage + obj = obj@types.core.Image(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.RGBImage') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.Image(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/RepetitionsTable.m b/+types/+core/RepetitionsTable.m new file mode 100644 index 00000000..a7d3b163 --- /dev/null +++ b/+types/+core/RepetitionsTable.m @@ -0,0 +1,60 @@ +classdef RepetitionsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% REPETITIONSTABLE A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. + + +% REQUIRED PROPERTIES +properties + sequential_recordings; % REQUIRED (DynamicTableRegion) A reference to one or more rows in the SequentialRecordingsTable table. + sequential_recordings_index; % REQUIRED (VectorIndex) Index dataset for the sequential_recordings column. 
+end + +methods + function obj = RepetitionsTable(varargin) + % REPETITIONSTABLE Constructor for RepetitionsTable + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'sequential_recordings',[]); + addParameter(p, 'sequential_recordings_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.sequential_recordings = p.Results.sequential_recordings; + obj.sequential_recordings_index = p.Results.sequential_recordings_index; + if strcmp(class(obj), 'types.core.RepetitionsTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.RepetitionsTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.sequential_recordings(obj, val) + obj.sequential_recordings = obj.validate_sequential_recordings(val); + end + function set.sequential_recordings_index(obj, val) + obj.sequential_recordings_index = obj.validate_sequential_recordings_index(val); + end + %% VALIDATORS + + function val = validate_sequential_recordings(obj, val) + val = types.util.checkDtype('sequential_recordings', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_sequential_recordings_index(obj, val) + val = types.util.checkDtype('sequential_recordings_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.sequential_recordings.export(fid, [fullpath '/sequential_recordings'], refs); + refs = obj.sequential_recordings_index.export(fid, [fullpath '/sequential_recordings_index'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/RoiResponseSeries.m b/+types/+core/RoiResponseSeries.m 
new file mode 100644 index 00000000..452d27b1 --- /dev/null +++ b/+types/+core/RoiResponseSeries.m @@ -0,0 +1,67 @@ +classdef RoiResponseSeries < types.core.TimeSeries & types.untyped.GroupClass +% ROIRESPONSESERIES ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. + + +% REQUIRED PROPERTIES +properties + rois; % REQUIRED (DynamicTableRegion) DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. +end + +methods + function obj = RoiResponseSeries(varargin) + % ROIRESPONSESERIES Constructor for RoiResponseSeries + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'rois',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.rois = p.Results.rois; + if strcmp(class(obj), 'types.core.RoiResponseSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.rois(obj, val) + obj.rois = obj.validate_rois(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_rois(obj, val) + val = types.util.checkDtype('rois', 'types.hdmf_common.DynamicTableRegion', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + 
end + refs = obj.rois.export(fid, [fullpath '/rois'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ScratchData.m b/+types/+core/ScratchData.m new file mode 100644 index 00000000..0d7923b3 --- /dev/null +++ b/+types/+core/ScratchData.m @@ -0,0 +1,66 @@ +classdef ScratchData < types.core.NWBData & types.untyped.DatasetClass +% SCRATCHDATA Any one-off datasets + + +% OPTIONAL PROPERTIES +properties + notes; % (char) Any notes the user has about the dataset being stored +end + +methods + function obj = ScratchData(varargin) + % SCRATCHDATA Constructor for ScratchData + obj = obj@types.core.NWBData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'notes',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.notes = p.Results.notes; + if strcmp(class(obj), 'types.core.ScratchData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.notes(obj, val) + obj.notes = obj.validate_notes(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + end + function val = validate_notes(obj, val) + val = types.util.checkDtype('notes', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/notes'], obj.notes); + end +end + +end \ No newline at end of file diff --git 
a/+types/+core/SequentialRecordingsTable.m b/+types/+core/SequentialRecordingsTable.m new file mode 100644 index 00000000..edcb85ab --- /dev/null +++ b/+types/+core/SequentialRecordingsTable.m @@ -0,0 +1,70 @@ +classdef SequentialRecordingsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% SEQUENTIALRECORDINGSTABLE A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + + +% REQUIRED PROPERTIES +properties + simultaneous_recordings; % REQUIRED (DynamicTableRegion) A reference to one or more rows in the SimultaneousRecordingsTable table. + simultaneous_recordings_index; % REQUIRED (VectorIndex) Index dataset for the simultaneous_recordings column. + stimulus_type; % REQUIRED (VectorData) The type of stimulus used for the sequential recording. +end + +methods + function obj = SequentialRecordingsTable(varargin) + % SEQUENTIALRECORDINGSTABLE Constructor for SequentialRecordingsTable + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'simultaneous_recordings',[]); + addParameter(p, 'simultaneous_recordings_index',[]); + addParameter(p, 'stimulus_type',[]); + misc.parseSkipInvalidName(p, varargin); + obj.simultaneous_recordings = p.Results.simultaneous_recordings; + obj.simultaneous_recordings_index = p.Results.simultaneous_recordings_index; + obj.stimulus_type = p.Results.stimulus_type; + if strcmp(class(obj), 'types.core.SequentialRecordingsTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.SequentialRecordingsTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% 
SETTERS + function set.simultaneous_recordings(obj, val) + obj.simultaneous_recordings = obj.validate_simultaneous_recordings(val); + end + function set.simultaneous_recordings_index(obj, val) + obj.simultaneous_recordings_index = obj.validate_simultaneous_recordings_index(val); + end + function set.stimulus_type(obj, val) + obj.stimulus_type = obj.validate_stimulus_type(val); + end + %% VALIDATORS + + function val = validate_simultaneous_recordings(obj, val) + val = types.util.checkDtype('simultaneous_recordings', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_simultaneous_recordings_index(obj, val) + val = types.util.checkDtype('simultaneous_recordings_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_stimulus_type(obj, val) + val = types.util.checkDtype('stimulus_type', 'types.hdmf_common.VectorData', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.simultaneous_recordings.export(fid, [fullpath '/simultaneous_recordings'], refs); + refs = obj.simultaneous_recordings_index.export(fid, [fullpath '/simultaneous_recordings_index'], refs); + refs = obj.stimulus_type.export(fid, [fullpath '/stimulus_type'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/SimultaneousRecordingsTable.m b/+types/+core/SimultaneousRecordingsTable.m new file mode 100644 index 00000000..c4c1fddc --- /dev/null +++ b/+types/+core/SimultaneousRecordingsTable.m @@ -0,0 +1,60 @@ +classdef SimultaneousRecordingsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% SIMULTANEOUSRECORDINGSTABLE A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. 
+ + +% REQUIRED PROPERTIES +properties + recordings; % REQUIRED (DynamicTableRegion) A reference to one or more rows in the IntracellularRecordingsTable table. + recordings_index; % REQUIRED (VectorIndex) Index dataset for the recordings column. +end + +methods + function obj = SimultaneousRecordingsTable(varargin) + % SIMULTANEOUSRECORDINGSTABLE Constructor for SimultaneousRecordingsTable + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'recordings',[]); + addParameter(p, 'recordings_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.recordings = p.Results.recordings; + obj.recordings_index = p.Results.recordings_index; + if strcmp(class(obj), 'types.core.SimultaneousRecordingsTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.SimultaneousRecordingsTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.recordings(obj, val) + obj.recordings = obj.validate_recordings(val); + end + function set.recordings_index(obj, val) + obj.recordings_index = obj.validate_recordings_index(val); + end + %% VALIDATORS + + function val = validate_recordings(obj, val) + val = types.util.checkDtype('recordings', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_recordings_index(obj, val) + val = types.util.checkDtype('recordings_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.recordings.export(fid, [fullpath '/recordings'], refs); + refs = obj.recordings_index.export(fid, [fullpath '/recordings_index'], refs); + end +end + +end \ No newline at end of file 
diff --git a/+types/+core/SpatialSeries.m b/+types/+core/SpatialSeries.m new file mode 100644 index 00000000..ce027498 --- /dev/null +++ b/+types/+core/SpatialSeries.m @@ -0,0 +1,109 @@
classdef SpatialSeries < types.core.TimeSeries & types.untyped.GroupClass
% SPATIALSERIES Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.


% OPTIONAL PROPERTIES
properties
    reference_frame; % (char) Description defining what exactly 'straight-ahead' means.
end

methods
    function obj = SpatialSeries(varargin)
        % SPATIALSERIES Constructor for SpatialSeries
        % Prepends the schema default data_unit ('meters') so an explicit
        % caller-supplied value in varargin overrides it.
        varargin = [{'data_unit' 'meters'} varargin];
        obj = obj@types.core.TimeSeries(varargin{:});

        parser = inputParser;
        parser.KeepUnmatched = true;
        parser.PartialMatching = false;
        parser.StructExpand = false;
        addParameter(parser, 'data', []);
        addParameter(parser, 'data_unit', []);
        addParameter(parser, 'reference_frame', []);
        misc.parseSkipInvalidName(parser, varargin);
        obj.data = parser.Results.data;
        obj.data_unit = parser.Results.data_unit;
        obj.reference_frame = parser.Results.reference_frame;
        if strcmp(class(obj), 'types.core.SpatialSeries')
            nameArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(nameArguments));
        end
    end
    %% SETTERS
    function set.reference_frame(obj, val)
        obj.reference_frame = obj.validate_reference_frame(val);
    end
    %% VALIDATORS

    function val = validate_data(obj, val)
        % data must be numeric with shape [3 x N], [2 x N], [1 x N] or [N].
        val = types.util.checkDtype('data', 'numeric', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[3,Inf], [2,Inf], [1,Inf], [Inf]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_data_unit(obj, val)
        % data_unit is a scalar char value.
        val = types.util.checkDtype('data_unit', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_reference_frame(obj, val)
        % reference_frame is a scalar char value.
        val = types.util.checkDtype('reference_frame', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        refs = export@types.core.TimeSeries(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
        if ~isempty(obj.reference_frame)
            if startsWith(class(obj.reference_frame), 'types.untyped.')
                refs = obj.reference_frame.export(fid, [fullpath '/reference_frame'], refs);
            else
                io.writeDataset(fid, [fullpath '/reference_frame'], obj.reference_frame);
            end
        end
    end
end

end
\ No newline at end of file
diff --git a/+types/+core/SpikeEventSeries.m b/+types/+core/SpikeEventSeries.m new file mode 100644 index 00000000..0b57b0fa --- /dev/null +++ b/+types/+core/SpikeEventSeries.m @@ -0,0 +1,82 @@
classdef SpikeEventSeries < types.core.ElectricalSeries & types.untyped.GroupClass
% SPIKEEVENTSERIES Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).



methods
    function obj = SpikeEventSeries(varargin)
        % SPIKEEVENTSERIES Constructor for SpikeEventSeries
        % Prepends schema defaults (data_unit, timestamps_interval,
        % timestamps_unit); caller-supplied values in varargin win.
        varargin = [{'data_unit' 'volts' 'timestamps_interval' types.util.correctType(1, 'int32') 'timestamps_unit' 'seconds'} varargin];
        obj = obj@types.core.ElectricalSeries(varargin{:});

        parser = inputParser;
        parser.KeepUnmatched = true;
        parser.PartialMatching = false;
        parser.StructExpand = false;
        addParameter(parser, 'data', []);
        addParameter(parser, 'data_unit', []);
        addParameter(parser, 'timestamps', []);
        addParameter(parser, 'timestamps_interval', []);
        addParameter(parser, 'timestamps_unit', []);
        misc.parseSkipInvalidName(parser, varargin);
        obj.data = parser.Results.data;
        obj.data_unit = parser.Results.data_unit;
        obj.timestamps = parser.Results.timestamps;
        obj.timestamps_interval = parser.Results.timestamps_interval;
        obj.timestamps_unit = parser.Results.timestamps_unit;
        if strcmp(class(obj), 'types.core.SpikeEventSeries')
            nameArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(nameArguments));
        end
    end
    %% SETTERS

    %% VALIDATORS

    function val = validate_data(obj, val)
        % data must be numeric with shape [events x channels x samples]
        % or [events x samples].
        val = types.util.checkDtype('data', 'numeric', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[Inf,Inf,Inf], [Inf,Inf]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_timestamps(obj, val)
        % timestamps is a 1-D double vector.
        val = types.util.checkDtype('timestamps', 'double', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[Inf]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        refs = export@types.core.ElectricalSeries(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
    end
end

end
\ No newline at end of file
diff --git a/+types/+core/Subject.m b/+types/+core/Subject.m new file mode 100644 index 00000000..bfd4e1ae --- /dev/null +++ b/+types/+core/Subject.m @@ -0,0 +1,344 @@
classdef Subject < types.core.NWBContainer & types.untyped.GroupClass
% SUBJECT Information about the animal or person from which the data was measured.


% OPTIONAL PROPERTIES
properties
    age; % (char) Age of subject. Can be supplied instead of 'date_of_birth'.
    age_reference; % (char) Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.
    date_of_birth; % (datetime) Date of birth of subject. Can be supplied instead of 'age'.
    description; % (char) Description of subject and where subject came from (e.g., breeder, if animal).
    genotype; % (char) Genetic strain. If absent, assume Wild Type (WT).
    sex; % (char) Gender of subject.
    species; % (char) Species of subject.
    strain; % (char) Strain of subject.
    subject_id; % (char) ID of animal/person used/participating in experiment (lab convention).
    weight; % (char) Weight at time of experiment, at time of surgery and at other important times.
end

methods
    function obj = Subject(varargin)
        % SUBJECT Constructor for Subject
        % Prepends the schema default age_reference ('birth'); an explicit
        % caller-supplied value in varargin overrides it.
        varargin = [{'age_reference' 'birth'} varargin];
        obj = obj@types.core.NWBContainer(varargin{:});

        parser = inputParser;
        parser.KeepUnmatched = true;
        parser.PartialMatching = false;
        parser.StructExpand = false;
        addParameter(parser, 'age', []);
        addParameter(parser, 'age_reference', []);
        addParameter(parser, 'date_of_birth', []);
        addParameter(parser, 'description', []);
        addParameter(parser, 'genotype', []);
        addParameter(parser, 'sex', []);
        addParameter(parser, 'species', []);
        addParameter(parser, 'strain', []);
        addParameter(parser, 'subject_id', []);
        addParameter(parser, 'weight', []);
        misc.parseSkipInvalidName(parser, varargin);
        obj.age = parser.Results.age;
        obj.age_reference = parser.Results.age_reference;
        obj.date_of_birth = parser.Results.date_of_birth;
        obj.description = parser.Results.description;
        obj.genotype = parser.Results.genotype;
        obj.sex = parser.Results.sex;
        obj.species = parser.Results.species;
        obj.strain = parser.Results.strain;
        obj.subject_id = parser.Results.subject_id;
        obj.weight = parser.Results.weight;
        if strcmp(class(obj), 'types.core.Subject')
            nameArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(nameArguments));
        end
    end
    %% SETTERS
    function set.age(obj, val)
        obj.age = obj.validate_age(val);
    end
    function set.age_reference(obj, val)
        obj.age_reference = obj.validate_age_reference(val);
    end
    function set.date_of_birth(obj, val)
        obj.date_of_birth = obj.validate_date_of_birth(val);
    end
    function set.description(obj, val)
        obj.description = obj.validate_description(val);
    end
    function set.genotype(obj, val)
        obj.genotype = obj.validate_genotype(val);
    end
    function set.sex(obj, val)
        obj.sex = obj.validate_sex(val);
    end
    function set.species(obj, val)
        obj.species = obj.validate_species(val);
    end
    function set.strain(obj, val)
        obj.strain = obj.validate_strain(val);
    end
    function set.subject_id(obj, val)
        obj.subject_id = obj.validate_subject_id(val);
    end
    function set.weight(obj, val)
        obj.weight = obj.validate_weight(val);
    end
    %% VALIDATORS

    function val = validate_age(obj, val)
        % age is a scalar char value.
        val = types.util.checkDtype('age', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_age_reference(obj, val)
        % age_reference is a scalar char value.
        val = types.util.checkDtype('age_reference', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_date_of_birth(obj, val)
        % date_of_birth is a scalar datetime value.
        val = types.util.checkDtype('date_of_birth', 'datetime', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_description(obj, val)
        % description is a scalar char value.
        val = types.util.checkDtype('description', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_genotype(obj, val)
        % genotype is a scalar char value.
        val = types.util.checkDtype('genotype', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_sex(obj, val)
        % sex is a scalar char value.
        val = types.util.checkDtype('sex', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_species(obj, val)
        % species is a scalar char value.
        val = types.util.checkDtype('species', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_strain(obj, val)
        % strain is a scalar char value.
        val = types.util.checkDtype('strain', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_subject_id(obj, val)
        % subject_id is a scalar char value.
        val = types.util.checkDtype('subject_id', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    function val = validate_weight(obj, val)
        % weight is a scalar char value.
        val = types.util.checkDtype('weight', 'char', val);
        if isa(val, 'types.untyped.DataStub')
            if val.ndims == 1
                observedSize = [val.dims 1];
            else
                observedSize = val.dims;
            end
        elseif istable(val)
            observedSize = [height(val) 1];
        elseif ischar(val)
            observedSize = [size(val, 1) 1];
        else
            observedSize = size(val);
        end
        allowedShapes = {[1]};
        types.util.checkDims(observedSize, allowedShapes);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        refs = export@types.core.NWBContainer(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
        if ~isempty(obj.age)
            if startsWith(class(obj.age), 'types.untyped.')
                refs = obj.age.export(fid, [fullpath '/age'], refs);
            else
                io.writeDataset(fid, [fullpath '/age'], obj.age);
            end
        end
        % age_reference is stored as an attribute on the age dataset, so it is
        % only written when age itself was written as a local dataset.
        if ~isempty(obj.age) && ~isa(obj.age, 'types.untyped.SoftLink') && ~isa(obj.age, 'types.untyped.ExternalLink') && ~isempty(obj.age_reference)
            io.writeAttribute(fid, [fullpath '/age/reference'], obj.age_reference);
        end
        if ~isempty(obj.date_of_birth)
            if startsWith(class(obj.date_of_birth), 'types.untyped.')
                refs = obj.date_of_birth.export(fid, [fullpath '/date_of_birth'], refs);
            else
                io.writeDataset(fid, [fullpath '/date_of_birth'], obj.date_of_birth);
            end
        end
        if ~isempty(obj.description)
            if startsWith(class(obj.description), 'types.untyped.')
                refs = obj.description.export(fid, [fullpath '/description'], refs);
            else
                io.writeDataset(fid, [fullpath '/description'], obj.description);
            end
        end
        if ~isempty(obj.genotype)
            if startsWith(class(obj.genotype), 'types.untyped.')
                refs = obj.genotype.export(fid, [fullpath '/genotype'], refs);
            else
                io.writeDataset(fid, [fullpath '/genotype'], obj.genotype);
            end
        end
        if ~isempty(obj.sex)
            if startsWith(class(obj.sex), 'types.untyped.')
                refs = obj.sex.export(fid, [fullpath '/sex'], refs);
            else
                io.writeDataset(fid, [fullpath '/sex'], obj.sex);
            end
        end
        if ~isempty(obj.species)
            if startsWith(class(obj.species), 'types.untyped.')
                refs = obj.species.export(fid, [fullpath '/species'], refs);
            else
                io.writeDataset(fid, [fullpath '/species'], obj.species);
            end
        end
        if ~isempty(obj.strain)
            if startsWith(class(obj.strain), 'types.untyped.')
                refs = obj.strain.export(fid, [fullpath '/strain'], refs);
            else
                io.writeDataset(fid, [fullpath '/strain'], obj.strain);
            end
        end
        if ~isempty(obj.subject_id)
            if startsWith(class(obj.subject_id), 'types.untyped.')
                refs = obj.subject_id.export(fid, [fullpath '/subject_id'], refs);
            else
                io.writeDataset(fid, [fullpath '/subject_id'], obj.subject_id);
            end
        end
        if ~isempty(obj.weight)
            if startsWith(class(obj.weight), 'types.untyped.')
                refs = obj.weight.export(fid, [fullpath '/weight'], refs);
            else
                io.writeDataset(fid, [fullpath '/weight'], obj.weight);
            end
        end
    end
end

end
\ No newline at end of file
diff --git a/+types/+core/SweepTable.m b/+types/+core/SweepTable.m new file mode 100644 index 00000000..be7b8b81 --- /dev/null +++ b/+types/+core/SweepTable.m @@ -0,0 +1,70 @@
classdef SweepTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass
% SWEEPTABLE [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata.


% REQUIRED PROPERTIES
properties
    series; % REQUIRED (VectorData) The PatchClampSeries with the sweep number in that row.
    series_index; % REQUIRED (VectorIndex) Index for series.
    sweep_number; % REQUIRED (VectorData) Sweep number of the PatchClampSeries in that row.
end

methods
    function obj = SweepTable(varargin)
        % SWEEPTABLE Constructor for SweepTable
        obj = obj@types.hdmf_common.DynamicTable(varargin{:});

        parser = inputParser;
        parser.KeepUnmatched = true;
        parser.PartialMatching = false;
        parser.StructExpand = false;
        addParameter(parser, 'series', []);
        addParameter(parser, 'series_index', []);
        addParameter(parser, 'sweep_number', []);
        misc.parseSkipInvalidName(parser, varargin);
        obj.series = parser.Results.series;
        obj.series_index = parser.Results.series_index;
        obj.sweep_number = parser.Results.sweep_number;
        if strcmp(class(obj), 'types.core.SweepTable')
            nameArguments = convertContainedStringsToChars(varargin(1:2:end));
            types.util.checkUnset(obj, unique(nameArguments));
        end
        if strcmp(class(obj), 'types.core.SweepTable')
            types.util.dynamictable.checkConfig(obj);
        end
    end
    %% SETTERS
    function set.series(obj, val)
        obj.series = obj.validate_series(val);
    end
    function set.series_index(obj, val)
        obj.series_index = obj.validate_series_index(val);
    end
    function set.sweep_number(obj, val)
        obj.sweep_number = obj.validate_sweep_number(val);
    end
    %% VALIDATORS

    function val = validate_series(obj, val)
        val = types.util.checkDtype('series', 'types.hdmf_common.VectorData', val);
    end
    function val = validate_series_index(obj, val)
        val = types.util.checkDtype('series_index', 'types.hdmf_common.VectorIndex', val);
    end
    function val = validate_sweep_number(obj, val)
        val = types.util.checkDtype('sweep_number', 'types.hdmf_common.VectorData', val);
    end
    %% EXPORT
    function refs = export(obj, fid, fullpath, refs)
        refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs);
        if any(strcmp(refs, fullpath))
            return;
        end
        % All three columns are REQUIRED, so they are exported unconditionally.
        refs = obj.series.export(fid, [fullpath '/series'], refs);
        refs = obj.series_index.export(fid, [fullpath '/series_index'], refs);
        refs = obj.sweep_number.export(fid, [fullpath '/sweep_number'], refs);
    end
end

end
\ No newline at end of file
diff --git
a/+types/+core/TimeIntervals.m b/+types/+core/TimeIntervals.m new file mode 100644 index 00000000..74555b63 --- /dev/null +++ b/+types/+core/TimeIntervals.m @@ -0,0 +1,111 @@ +classdef TimeIntervals < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% TIMEINTERVALS A container for aggregating epoch data and the TimeSeries that each epoch applies to. + + +% REQUIRED PROPERTIES +properties + start_time; % REQUIRED (VectorData) Start time of epoch, in seconds. + stop_time; % REQUIRED (VectorData) Stop time of epoch, in seconds. +end +% OPTIONAL PROPERTIES +properties + tags; % (VectorData) User-defined tags that identify or categorize events. + tags_index; % (VectorIndex) Index for tags. + timeseries; % (TimeSeriesReferenceVectorData) An index into a TimeSeries object. + timeseries_index; % (VectorIndex) Index for timeseries. +end + +methods + function obj = TimeIntervals(varargin) + % TIMEINTERVALS Constructor for TimeIntervals + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'start_time',[]); + addParameter(p, 'stop_time',[]); + addParameter(p, 'tags',[]); + addParameter(p, 'tags_index',[]); + addParameter(p, 'timeseries',[]); + addParameter(p, 'timeseries_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.start_time = p.Results.start_time; + obj.stop_time = p.Results.stop_time; + obj.tags = p.Results.tags; + obj.tags_index = p.Results.tags_index; + obj.timeseries = p.Results.timeseries; + obj.timeseries_index = p.Results.timeseries_index; + if strcmp(class(obj), 'types.core.TimeIntervals') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.TimeIntervals') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.start_time(obj, val) + obj.start_time = 
obj.validate_start_time(val); + end + function set.stop_time(obj, val) + obj.stop_time = obj.validate_stop_time(val); + end + function set.tags(obj, val) + obj.tags = obj.validate_tags(val); + end + function set.tags_index(obj, val) + obj.tags_index = obj.validate_tags_index(val); + end + function set.timeseries(obj, val) + obj.timeseries = obj.validate_timeseries(val); + end + function set.timeseries_index(obj, val) + obj.timeseries_index = obj.validate_timeseries_index(val); + end + %% VALIDATORS + + function val = validate_start_time(obj, val) + val = types.util.checkDtype('start_time', 'types.hdmf_common.VectorData', val); + end + function val = validate_stop_time(obj, val) + val = types.util.checkDtype('stop_time', 'types.hdmf_common.VectorData', val); + end + function val = validate_tags(obj, val) + val = types.util.checkDtype('tags', 'types.hdmf_common.VectorData', val); + end + function val = validate_tags_index(obj, val) + val = types.util.checkDtype('tags_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_timeseries(obj, val) + val = types.util.checkDtype('timeseries', 'types.core.TimeSeriesReferenceVectorData', val); + end + function val = validate_timeseries_index(obj, val) + val = types.util.checkDtype('timeseries_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.start_time.export(fid, [fullpath '/start_time'], refs); + refs = obj.stop_time.export(fid, [fullpath '/stop_time'], refs); + if ~isempty(obj.tags) + refs = obj.tags.export(fid, [fullpath '/tags'], refs); + end + if ~isempty(obj.tags_index) + refs = obj.tags_index.export(fid, [fullpath '/tags_index'], refs); + end + if ~isempty(obj.timeseries) + refs = obj.timeseries.export(fid, [fullpath '/timeseries'], refs); + end + if ~isempty(obj.timeseries_index) + refs = 
obj.timeseries_index.export(fid, [fullpath '/timeseries_index'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/TimeSeries.m b/+types/+core/TimeSeries.m new file mode 100644 index 00000000..5c905fb5 --- /dev/null +++ b/+types/+core/TimeSeries.m @@ -0,0 +1,416 @@ +classdef TimeSeries < types.core.NWBDataInterface & types.untyped.GroupClass +% TIMESERIES General purpose time series. + + +% READONLY PROPERTIES +properties(SetAccess = protected) + starting_time_unit; % (char) Unit of measurement for time, which is fixed to 'seconds'. + timestamps_interval; % (int32) Value is '1' + timestamps_unit; % (char) Unit of measurement for timestamps, which is fixed to 'seconds'. +end +% REQUIRED PROPERTIES +properties + data; % REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. +end +% OPTIONAL PROPERTIES +properties + comments; % (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + control; % (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + control_description; % (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + data_continuity; % (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. 
An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + data_conversion; % (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + data_offset; % (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + data_resolution; % (single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + data_unit; % (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + description; % (char) Description of the time series. 
+ starting_time; % (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + starting_time_rate; % (single) Sampling rate, in Hz. + timestamps; % (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. +end + +methods + function obj = TimeSeries(varargin) + % TIMESERIES Constructor for TimeSeries + varargin = [{'comments' 'no comments' 'data_conversion' types.util.correctType(1, 'single') 'data_offset' types.util.correctType(0, 'single') 'data_resolution' types.util.correctType(-1, 'single') 'description' 'no description' 'starting_time_unit' 'seconds' 'timestamps_interval' types.util.correctType(1, 'int32') 'timestamps_unit' 'seconds'} varargin]; + obj = obj@types.core.NWBDataInterface(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'comments',[]); + addParameter(p, 'control',[]); + addParameter(p, 'control_description',[]); + addParameter(p, 'data',[]); + addParameter(p, 'data_continuity',[]); + addParameter(p, 'data_conversion',[]); + addParameter(p, 'data_offset',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'description',[]); + addParameter(p, 'starting_time',[]); + addParameter(p, 'starting_time_rate',[]); + addParameter(p, 'starting_time_unit',[]); + addParameter(p, 'timestamps',[]); + addParameter(p, 'timestamps_interval',[]); + addParameter(p, 'timestamps_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.comments = p.Results.comments; + obj.control = p.Results.control; + obj.control_description = p.Results.control_description; + obj.data = p.Results.data; + obj.data_continuity = p.Results.data_continuity; + obj.data_conversion = p.Results.data_conversion; + 
obj.data_offset = p.Results.data_offset; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + obj.description = p.Results.description; + obj.starting_time = p.Results.starting_time; + obj.starting_time_rate = p.Results.starting_time_rate; + obj.starting_time_unit = p.Results.starting_time_unit; + obj.timestamps = p.Results.timestamps; + obj.timestamps_interval = p.Results.timestamps_interval; + obj.timestamps_unit = p.Results.timestamps_unit; + if strcmp(class(obj), 'types.core.TimeSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.comments(obj, val) + obj.comments = obj.validate_comments(val); + end + function set.control(obj, val) + obj.control = obj.validate_control(val); + end + function set.control_description(obj, val) + obj.control_description = obj.validate_control_description(val); + end + function set.data(obj, val) + obj.data = obj.validate_data(val); + end + function set.data_continuity(obj, val) + obj.data_continuity = obj.validate_data_continuity(val); + end + function set.data_conversion(obj, val) + obj.data_conversion = obj.validate_data_conversion(val); + end + function set.data_offset(obj, val) + obj.data_offset = obj.validate_data_offset(val); + end + function set.data_resolution(obj, val) + obj.data_resolution = obj.validate_data_resolution(val); + end + function set.data_unit(obj, val) + obj.data_unit = obj.validate_data_unit(val); + end + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.starting_time(obj, val) + obj.starting_time = obj.validate_starting_time(val); + end + function set.starting_time_rate(obj, val) + obj.starting_time_rate = obj.validate_starting_time_rate(val); + end + function set.timestamps(obj, val) + obj.timestamps = obj.validate_timestamps(val); + end + %% VALIDATORS + + function val = 
validate_comments(obj, val) + val = types.util.checkDtype('comments', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_control(obj, val) + val = types.util.checkDtype('control', 'uint8', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_control_description(obj, val) + val = types.util.checkDtype('control_description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data(obj, val) + + end + function val = validate_data_continuity(obj, val) + val = types.util.checkDtype('data_continuity', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_conversion(obj, val) + val = types.util.checkDtype('data_conversion', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + 
elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_offset(obj, val) + val = types.util.checkDtype('data_offset', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_resolution(obj, val) + val = types.util.checkDtype('data_resolution', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_starting_time(obj, val) + val = 
types.util.checkDtype('starting_time', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_starting_time_rate(obj, val) + val = types.util.checkDtype('starting_time_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_timestamps(obj, val) + val = types.util.checkDtype('timestamps', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.comments) + io.writeAttribute(fid, [fullpath '/comments'], obj.comments); + end + if ~isempty(obj.control) + if startsWith(class(obj.control), 'types.untyped.') + refs = obj.control.export(fid, [fullpath '/control'], refs); + elseif ~isempty(obj.control) + io.writeDataset(fid, [fullpath '/control'], obj.control, 'forceArray'); + end + end + if ~isempty(obj.control_description) + if startsWith(class(obj.control_description), 'types.untyped.') + refs = obj.control_description.export(fid, [fullpath '/control_description'], refs); + elseif 
~isempty(obj.control_description) + io.writeDataset(fid, [fullpath '/control_description'], obj.control_description, 'forceArray'); + end + end + if startsWith(class(obj.data), 'types.untyped.') + refs = obj.data.export(fid, [fullpath '/data'], refs); + elseif ~isempty(obj.data) + io.writeDataset(fid, [fullpath '/data'], obj.data, 'forceArray'); + end + if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') && ~isempty(obj.data_continuity) + io.writeAttribute(fid, [fullpath '/data/continuity'], obj.data_continuity); + end + if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') && ~isempty(obj.data_conversion) + io.writeAttribute(fid, [fullpath '/data/conversion'], obj.data_conversion); + end + if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') && ~isempty(obj.data_offset) + io.writeAttribute(fid, [fullpath '/data/offset'], obj.data_offset); + end + if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') && ~isempty(obj.data_resolution) + io.writeAttribute(fid, [fullpath '/data/resolution'], obj.data_resolution); + end + if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/data/unit'], obj.data_unit); + end + if ~isempty(obj.description) + io.writeAttribute(fid, [fullpath '/description'], obj.description); + end + if ~isempty(obj.starting_time) + if startsWith(class(obj.starting_time), 'types.untyped.') + refs = obj.starting_time.export(fid, [fullpath '/starting_time'], refs); + elseif ~isempty(obj.starting_time) + io.writeDataset(fid, [fullpath '/starting_time'], obj.starting_time); + end + end + if ~isempty(obj.starting_time) && ~isa(obj.starting_time, 'types.untyped.SoftLink') && ~isa(obj.starting_time, 
'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/starting_time/rate'], obj.starting_time_rate); + end + if ~isempty(obj.starting_time) && ~isa(obj.starting_time, 'types.untyped.SoftLink') && ~isa(obj.starting_time, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/starting_time/unit'], obj.starting_time_unit); + end + if ~isempty(obj.timestamps) + if startsWith(class(obj.timestamps), 'types.untyped.') + refs = obj.timestamps.export(fid, [fullpath '/timestamps'], refs); + elseif ~isempty(obj.timestamps) + io.writeDataset(fid, [fullpath '/timestamps'], obj.timestamps, 'forceArray'); + end + end + if ~isempty(obj.timestamps) && ~isa(obj.timestamps, 'types.untyped.SoftLink') && ~isa(obj.timestamps, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/timestamps/interval'], obj.timestamps_interval); + end + if ~isempty(obj.timestamps) && ~isa(obj.timestamps, 'types.untyped.SoftLink') && ~isa(obj.timestamps, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/timestamps/unit'], obj.timestamps_unit); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/TimeSeriesReferenceVectorData.m b/+types/+core/TimeSeriesReferenceVectorData.m new file mode 100644 index 00000000..20f77944 --- /dev/null +++ b/+types/+core/TimeSeriesReferenceVectorData.m @@ -0,0 +1,50 @@ +classdef TimeSeriesReferenceVectorData < types.hdmf_common.VectorData & types.untyped.DatasetClass +% TIMESERIESREFERENCEVECTORDATA Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. 
+ + + +methods + function obj = TimeSeriesReferenceVectorData(varargin) + % TIMESERIESREFERENCEVECTORDATA Constructor for TimeSeriesReferenceVectorData + obj = obj@types.hdmf_common.VectorData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.core.TimeSeriesReferenceVectorData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + if isempty(val) || isa(val, 'types.untyped.DataStub') + return; + end + if ~istable(val) && ~isstruct(val) && ~isa(val, 'containers.Map') + error('Property `data` must be a table,struct, or containers.Map.'); + end + vprops = struct(); + vprops.idx_start = 'int32'; + vprops.count = 'int32'; + vprops.timeseries = 'types.untyped.ObjectView'; + val = types.util.checkDtype('data', vprops, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.VectorData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/TwoPhotonSeries.m b/+types/+core/TwoPhotonSeries.m new file mode 100644 index 00000000..e49afe47 --- /dev/null +++ b/+types/+core/TwoPhotonSeries.m @@ -0,0 +1,132 @@ +classdef TwoPhotonSeries < types.core.ImageSeries & types.untyped.GroupClass +% TWOPHOTONSERIES Image stack recorded over time from 2-photon microscope. + + +% OPTIONAL PROPERTIES +properties + field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. + imaging_plane; % ImagingPlane + pmt_gain; % (single) Photomultiplier gain. + scan_line_rate; % (single) Lines imaged per second. 
This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. +end + +methods + function obj = TwoPhotonSeries(varargin) + % TWOPHOTONSERIES Constructor for TwoPhotonSeries + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'field_of_view',[]); + addParameter(p, 'imaging_plane',[]); + addParameter(p, 'pmt_gain',[]); + addParameter(p, 'scan_line_rate',[]); + misc.parseSkipInvalidName(p, varargin); + obj.field_of_view = p.Results.field_of_view; + obj.imaging_plane = p.Results.imaging_plane; + obj.pmt_gain = p.Results.pmt_gain; + obj.scan_line_rate = p.Results.scan_line_rate; + if strcmp(class(obj), 'types.core.TwoPhotonSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.field_of_view(obj, val) + obj.field_of_view = obj.validate_field_of_view(val); + end + function set.imaging_plane(obj, val) + obj.imaging_plane = obj.validate_imaging_plane(val); + end + function set.pmt_gain(obj, val) + obj.pmt_gain = obj.validate_pmt_gain(val); + end + function set.scan_line_rate(obj, val) + obj.scan_line_rate = obj.validate_scan_line_rate(val); + end + %% VALIDATORS + + function val = validate_field_of_view(obj, val) + val = types.util.checkDtype('field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_imaging_plane(obj, val) + val = types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val); + end + function val 
= validate_pmt_gain(obj, val) + val = types.util.checkDtype('pmt_gain', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_scan_line_rate(obj, val) + val = types.util.checkDtype('scan_line_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.field_of_view) + if startsWith(class(obj.field_of_view), 'types.untyped.') + refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); + elseif ~isempty(obj.field_of_view) + io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 'forceArray'); + end + end + refs = obj.imaging_plane.export(fid, [fullpath '/imaging_plane'], refs); + if ~isempty(obj.pmt_gain) + io.writeAttribute(fid, [fullpath '/pmt_gain'], obj.pmt_gain); + end + if ~isempty(obj.scan_line_rate) + io.writeAttribute(fid, [fullpath '/scan_line_rate'], obj.scan_line_rate); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Units.m b/+types/+core/Units.m new file mode 100644 index 00000000..e84b982a --- /dev/null +++ b/+types/+core/Units.m @@ -0,0 +1,184 @@ +classdef Units < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% UNITS Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) 
should be concatenated and stored in spike_times. + + +% OPTIONAL PROPERTIES +properties + electrode_group; % (VectorData) Electrode group that each spike unit came from. + electrodes; % (DynamicTableRegion) Electrode that each spike unit came from, specified using a DynamicTableRegion. + electrodes_index; % (VectorIndex) Index into electrodes. + obs_intervals; % (VectorData) Observation intervals for each unit. + obs_intervals_index; % (VectorIndex) Index into the obs_intervals dataset. + spike_times; % (VectorData) Spike times for each unit in seconds. + spike_times_index; % (VectorIndex) Index into the spike_times dataset. + waveform_mean; % (VectorData) Spike waveform mean for each spike unit. + waveform_sd; % (VectorData) Spike waveform standard deviation for each spike unit. + waveforms; % (VectorData) Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + waveforms_index; % (VectorIndex) Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. + waveforms_index_index; % (VectorIndex) Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. +end + +methods + function obj = Units(varargin) + % UNITS Constructor for Units + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'electrode_group',[]); + addParameter(p, 'electrodes',[]); + addParameter(p, 'electrodes_index',[]); + addParameter(p, 'obs_intervals',[]); + addParameter(p, 'obs_intervals_index',[]); + addParameter(p, 'spike_times',[]); + addParameter(p, 'spike_times_index',[]); + addParameter(p, 'waveform_mean',[]); + addParameter(p, 'waveform_sd',[]); + addParameter(p, 'waveforms',[]); + addParameter(p, 'waveforms_index',[]); + addParameter(p, 'waveforms_index_index',[]); + misc.parseSkipInvalidName(p, varargin); + obj.electrode_group = p.Results.electrode_group; + obj.electrodes = p.Results.electrodes; + obj.electrodes_index = p.Results.electrodes_index; + obj.obs_intervals = p.Results.obs_intervals; + obj.obs_intervals_index = p.Results.obs_intervals_index; + obj.spike_times = p.Results.spike_times; + obj.spike_times_index = 
p.Results.spike_times_index; + obj.waveform_mean = p.Results.waveform_mean; + obj.waveform_sd = p.Results.waveform_sd; + obj.waveforms = p.Results.waveforms; + obj.waveforms_index = p.Results.waveforms_index; + obj.waveforms_index_index = p.Results.waveforms_index_index; + if strcmp(class(obj), 'types.core.Units') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.core.Units') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.electrode_group(obj, val) + obj.electrode_group = obj.validate_electrode_group(val); + end + function set.electrodes(obj, val) + obj.electrodes = obj.validate_electrodes(val); + end + function set.electrodes_index(obj, val) + obj.electrodes_index = obj.validate_electrodes_index(val); + end + function set.obs_intervals(obj, val) + obj.obs_intervals = obj.validate_obs_intervals(val); + end + function set.obs_intervals_index(obj, val) + obj.obs_intervals_index = obj.validate_obs_intervals_index(val); + end + function set.spike_times(obj, val) + obj.spike_times = obj.validate_spike_times(val); + end + function set.spike_times_index(obj, val) + obj.spike_times_index = obj.validate_spike_times_index(val); + end + function set.waveform_mean(obj, val) + obj.waveform_mean = obj.validate_waveform_mean(val); + end + function set.waveform_sd(obj, val) + obj.waveform_sd = obj.validate_waveform_sd(val); + end + function set.waveforms(obj, val) + obj.waveforms = obj.validate_waveforms(val); + end + function set.waveforms_index(obj, val) + obj.waveforms_index = obj.validate_waveforms_index(val); + end + function set.waveforms_index_index(obj, val) + obj.waveforms_index_index = obj.validate_waveforms_index_index(val); + end + %% VALIDATORS + + function val = validate_electrode_group(obj, val) + val = types.util.checkDtype('electrode_group', 'types.hdmf_common.VectorData', val); + end + function val = 
validate_electrodes(obj, val) + val = types.util.checkDtype('electrodes', 'types.hdmf_common.DynamicTableRegion', val); + end + function val = validate_electrodes_index(obj, val) + val = types.util.checkDtype('electrodes_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_obs_intervals(obj, val) + val = types.util.checkDtype('obs_intervals', 'types.hdmf_common.VectorData', val); + end + function val = validate_obs_intervals_index(obj, val) + val = types.util.checkDtype('obs_intervals_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_spike_times(obj, val) + val = types.util.checkDtype('spike_times', 'types.hdmf_common.VectorData', val); + end + function val = validate_spike_times_index(obj, val) + val = types.util.checkDtype('spike_times_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_waveform_mean(obj, val) + val = types.util.checkDtype('waveform_mean', 'types.hdmf_common.VectorData', val); + end + function val = validate_waveform_sd(obj, val) + val = types.util.checkDtype('waveform_sd', 'types.hdmf_common.VectorData', val); + end + function val = validate_waveforms(obj, val) + val = types.util.checkDtype('waveforms', 'types.hdmf_common.VectorData', val); + end + function val = validate_waveforms_index(obj, val) + val = types.util.checkDtype('waveforms_index', 'types.hdmf_common.VectorIndex', val); + end + function val = validate_waveforms_index_index(obj, val) + val = types.util.checkDtype('waveforms_index_index', 'types.hdmf_common.VectorIndex', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.electrode_group) + refs = obj.electrode_group.export(fid, [fullpath '/electrode_group'], refs); + end + if ~isempty(obj.electrodes) + refs = obj.electrodes.export(fid, [fullpath '/electrodes'], refs); + end + if 
~isempty(obj.electrodes_index) + refs = obj.electrodes_index.export(fid, [fullpath '/electrodes_index'], refs); + end + if ~isempty(obj.obs_intervals) + refs = obj.obs_intervals.export(fid, [fullpath '/obs_intervals'], refs); + end + if ~isempty(obj.obs_intervals_index) + refs = obj.obs_intervals_index.export(fid, [fullpath '/obs_intervals_index'], refs); + end + if ~isempty(obj.spike_times) + refs = obj.spike_times.export(fid, [fullpath '/spike_times'], refs); + end + if ~isempty(obj.spike_times_index) + refs = obj.spike_times_index.export(fid, [fullpath '/spike_times_index'], refs); + end + if ~isempty(obj.waveform_mean) + refs = obj.waveform_mean.export(fid, [fullpath '/waveform_mean'], refs); + end + if ~isempty(obj.waveform_sd) + refs = obj.waveform_sd.export(fid, [fullpath '/waveform_sd'], refs); + end + if ~isempty(obj.waveforms) + refs = obj.waveforms.export(fid, [fullpath '/waveforms'], refs); + end + if ~isempty(obj.waveforms_index) + refs = obj.waveforms_index.export(fid, [fullpath '/waveforms_index'], refs); + end + if ~isempty(obj.waveforms_index_index) + refs = obj.waveforms_index_index.export(fid, [fullpath '/waveforms_index_index'], refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/VoltageClampSeries.m b/+types/+core/VoltageClampSeries.m new file mode 100644 index 00000000..862f646d --- /dev/null +++ b/+types/+core/VoltageClampSeries.m @@ -0,0 +1,307 @@ +classdef VoltageClampSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% VOLTAGECLAMPSERIES Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. + + +% READONLY PROPERTIES +properties(SetAccess = protected) + capacitance_fast_unit; % (char) Unit of measurement for capacitance_fast, which is fixed to 'farads'. + capacitance_slow_unit; % (char) Unit of measurement for capacitance_slow, which is fixed to 'farads'. 
+ resistance_comp_bandwidth_unit; % (char) Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + resistance_comp_correction_unit; % (char) Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + resistance_comp_prediction_unit; % (char) Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + whole_cell_capacitance_comp_unit; % (char) Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + whole_cell_series_resistance_comp_unit; % (char) Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. +end +% OPTIONAL PROPERTIES +properties + capacitance_fast; % (single) Fast capacitance, in farads. + capacitance_slow; % (single) Slow capacitance, in farads. + resistance_comp_bandwidth; % (single) Resistance compensation bandwidth, in hertz. + resistance_comp_correction; % (single) Resistance compensation correction, in percent. + resistance_comp_prediction; % (single) Resistance compensation prediction, in percent. + whole_cell_capacitance_comp; % (single) Whole cell capacitance compensation, in farads. + whole_cell_series_resistance_comp; % (single) Whole cell series resistance compensation, in ohms. 
+end + +methods + function obj = VoltageClampSeries(varargin) + % VOLTAGECLAMPSERIES Constructor for VoltageClampSeries + varargin = [{'capacitance_fast_unit' 'farads' 'capacitance_slow_unit' 'farads' 'data_unit' 'amperes' 'resistance_comp_bandwidth_unit' 'hertz' 'resistance_comp_correction_unit' 'percent' 'resistance_comp_prediction_unit' 'percent' 'whole_cell_capacitance_comp_unit' 'farads' 'whole_cell_series_resistance_comp_unit' 'ohms'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'capacitance_fast',[]); + addParameter(p, 'capacitance_fast_unit',[]); + addParameter(p, 'capacitance_slow',[]); + addParameter(p, 'capacitance_slow_unit',[]); + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'resistance_comp_bandwidth',[]); + addParameter(p, 'resistance_comp_bandwidth_unit',[]); + addParameter(p, 'resistance_comp_correction',[]); + addParameter(p, 'resistance_comp_correction_unit',[]); + addParameter(p, 'resistance_comp_prediction',[]); + addParameter(p, 'resistance_comp_prediction_unit',[]); + addParameter(p, 'whole_cell_capacitance_comp',[]); + addParameter(p, 'whole_cell_capacitance_comp_unit',[]); + addParameter(p, 'whole_cell_series_resistance_comp',[]); + addParameter(p, 'whole_cell_series_resistance_comp_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.capacitance_fast = p.Results.capacitance_fast; + obj.capacitance_fast_unit = p.Results.capacitance_fast_unit; + obj.capacitance_slow = p.Results.capacitance_slow; + obj.capacitance_slow_unit = p.Results.capacitance_slow_unit; + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.resistance_comp_bandwidth = p.Results.resistance_comp_bandwidth; + obj.resistance_comp_bandwidth_unit = p.Results.resistance_comp_bandwidth_unit; + obj.resistance_comp_correction = p.Results.resistance_comp_correction; + 
obj.resistance_comp_correction_unit = p.Results.resistance_comp_correction_unit; + obj.resistance_comp_prediction = p.Results.resistance_comp_prediction; + obj.resistance_comp_prediction_unit = p.Results.resistance_comp_prediction_unit; + obj.whole_cell_capacitance_comp = p.Results.whole_cell_capacitance_comp; + obj.whole_cell_capacitance_comp_unit = p.Results.whole_cell_capacitance_comp_unit; + obj.whole_cell_series_resistance_comp = p.Results.whole_cell_series_resistance_comp; + obj.whole_cell_series_resistance_comp_unit = p.Results.whole_cell_series_resistance_comp_unit; + if strcmp(class(obj), 'types.core.VoltageClampSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.capacitance_fast(obj, val) + obj.capacitance_fast = obj.validate_capacitance_fast(val); + end + function set.capacitance_slow(obj, val) + obj.capacitance_slow = obj.validate_capacitance_slow(val); + end + function set.resistance_comp_bandwidth(obj, val) + obj.resistance_comp_bandwidth = obj.validate_resistance_comp_bandwidth(val); + end + function set.resistance_comp_correction(obj, val) + obj.resistance_comp_correction = obj.validate_resistance_comp_correction(val); + end + function set.resistance_comp_prediction(obj, val) + obj.resistance_comp_prediction = obj.validate_resistance_comp_prediction(val); + end + function set.whole_cell_capacitance_comp(obj, val) + obj.whole_cell_capacitance_comp = obj.validate_whole_cell_capacitance_comp(val); + end + function set.whole_cell_series_resistance_comp(obj, val) + obj.whole_cell_series_resistance_comp = obj.validate_whole_cell_series_resistance_comp(val); + end + %% VALIDATORS + + function val = validate_capacitance_fast(obj, val) + val = types.util.checkDtype('capacitance_fast', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif 
istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_capacitance_slow(obj, val) + val = types.util.checkDtype('capacitance_slow', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data(obj, val) + + end + function val = validate_resistance_comp_bandwidth(obj, val) + val = types.util.checkDtype('resistance_comp_bandwidth', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_resistance_comp_correction(obj, val) + val = types.util.checkDtype('resistance_comp_correction', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_resistance_comp_prediction(obj, val) + val = types.util.checkDtype('resistance_comp_prediction', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + 
validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_whole_cell_capacitance_comp(obj, val) + val = types.util.checkDtype('whole_cell_capacitance_comp', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_whole_cell_series_resistance_comp(obj, val) + val = types.util.checkDtype('whole_cell_series_resistance_comp', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.capacitance_fast) + if startsWith(class(obj.capacitance_fast), 'types.untyped.') + refs = obj.capacitance_fast.export(fid, [fullpath '/capacitance_fast'], refs); + elseif ~isempty(obj.capacitance_fast) + io.writeDataset(fid, [fullpath '/capacitance_fast'], obj.capacitance_fast); + end + end + if ~isempty(obj.capacitance_fast) && ~isa(obj.capacitance_fast, 'types.untyped.SoftLink') && ~isa(obj.capacitance_fast, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/capacitance_fast/unit'], obj.capacitance_fast_unit); + end + if ~isempty(obj.capacitance_slow) + if startsWith(class(obj.capacitance_slow), 'types.untyped.') + refs = obj.capacitance_slow.export(fid, [fullpath '/capacitance_slow'], refs); + elseif ~isempty(obj.capacitance_slow) + io.writeDataset(fid, 
[fullpath '/capacitance_slow'], obj.capacitance_slow); + end + end + if ~isempty(obj.capacitance_slow) && ~isa(obj.capacitance_slow, 'types.untyped.SoftLink') && ~isa(obj.capacitance_slow, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/capacitance_slow/unit'], obj.capacitance_slow_unit); + end + if ~isempty(obj.resistance_comp_bandwidth) + if startsWith(class(obj.resistance_comp_bandwidth), 'types.untyped.') + refs = obj.resistance_comp_bandwidth.export(fid, [fullpath '/resistance_comp_bandwidth'], refs); + elseif ~isempty(obj.resistance_comp_bandwidth) + io.writeDataset(fid, [fullpath '/resistance_comp_bandwidth'], obj.resistance_comp_bandwidth); + end + end + if ~isempty(obj.resistance_comp_bandwidth) && ~isa(obj.resistance_comp_bandwidth, 'types.untyped.SoftLink') && ~isa(obj.resistance_comp_bandwidth, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/resistance_comp_bandwidth/unit'], obj.resistance_comp_bandwidth_unit); + end + if ~isempty(obj.resistance_comp_correction) + if startsWith(class(obj.resistance_comp_correction), 'types.untyped.') + refs = obj.resistance_comp_correction.export(fid, [fullpath '/resistance_comp_correction'], refs); + elseif ~isempty(obj.resistance_comp_correction) + io.writeDataset(fid, [fullpath '/resistance_comp_correction'], obj.resistance_comp_correction); + end + end + if ~isempty(obj.resistance_comp_correction) && ~isa(obj.resistance_comp_correction, 'types.untyped.SoftLink') && ~isa(obj.resistance_comp_correction, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/resistance_comp_correction/unit'], obj.resistance_comp_correction_unit); + end + if ~isempty(obj.resistance_comp_prediction) + if startsWith(class(obj.resistance_comp_prediction), 'types.untyped.') + refs = obj.resistance_comp_prediction.export(fid, [fullpath '/resistance_comp_prediction'], refs); + elseif ~isempty(obj.resistance_comp_prediction) + io.writeDataset(fid, [fullpath '/resistance_comp_prediction'], 
obj.resistance_comp_prediction); + end + end + if ~isempty(obj.resistance_comp_prediction) && ~isa(obj.resistance_comp_prediction, 'types.untyped.SoftLink') && ~isa(obj.resistance_comp_prediction, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/resistance_comp_prediction/unit'], obj.resistance_comp_prediction_unit); + end + if ~isempty(obj.whole_cell_capacitance_comp) + if startsWith(class(obj.whole_cell_capacitance_comp), 'types.untyped.') + refs = obj.whole_cell_capacitance_comp.export(fid, [fullpath '/whole_cell_capacitance_comp'], refs); + elseif ~isempty(obj.whole_cell_capacitance_comp) + io.writeDataset(fid, [fullpath '/whole_cell_capacitance_comp'], obj.whole_cell_capacitance_comp); + end + end + if ~isempty(obj.whole_cell_capacitance_comp) && ~isa(obj.whole_cell_capacitance_comp, 'types.untyped.SoftLink') && ~isa(obj.whole_cell_capacitance_comp, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/whole_cell_capacitance_comp/unit'], obj.whole_cell_capacitance_comp_unit); + end + if ~isempty(obj.whole_cell_series_resistance_comp) + if startsWith(class(obj.whole_cell_series_resistance_comp), 'types.untyped.') + refs = obj.whole_cell_series_resistance_comp.export(fid, [fullpath '/whole_cell_series_resistance_comp'], refs); + elseif ~isempty(obj.whole_cell_series_resistance_comp) + io.writeDataset(fid, [fullpath '/whole_cell_series_resistance_comp'], obj.whole_cell_series_resistance_comp); + end + end + if ~isempty(obj.whole_cell_series_resistance_comp) && ~isa(obj.whole_cell_series_resistance_comp, 'types.untyped.SoftLink') && ~isa(obj.whole_cell_series_resistance_comp, 'types.untyped.ExternalLink') + io.writeAttribute(fid, [fullpath '/whole_cell_series_resistance_comp/unit'], obj.whole_cell_series_resistance_comp_unit); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/VoltageClampStimulusSeries.m b/+types/+core/VoltageClampStimulusSeries.m new file mode 100644 index 00000000..ca589bf3 --- /dev/null 
+++ b/+types/+core/VoltageClampStimulusSeries.m @@ -0,0 +1,43 @@ +classdef VoltageClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% VOLTAGECLAMPSTIMULUSSERIES Stimulus voltage applied during a voltage clamp recording. + + + +methods + function obj = VoltageClampStimulusSeries(varargin) + % VOLTAGECLAMPSTIMULUSSERIES Constructor for VoltageClampStimulusSeries + varargin = [{'data_unit' 'volts'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.VoltageClampStimulusSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/AlignedDynamicTable.m b/+types/+hdmf_common/AlignedDynamicTable.m new file mode 100644 index 00000000..1754bee8 --- /dev/null +++ b/+types/+hdmf_common/AlignedDynamicTable.m @@ -0,0 +1,78 @@ +classdef AlignedDynamicTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass +% ALIGNEDDYNAMICTABLE DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. 
This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. + + +% OPTIONAL PROPERTIES +properties + categories; % (char) The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group. + dynamictable; % (DynamicTable) A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable. 
+end + +methods + function obj = AlignedDynamicTable(varargin) + % ALIGNEDDYNAMICTABLE Constructor for AlignedDynamicTable + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); + [obj.dynamictable, ivarargin] = types.util.parseConstrained(obj,'dynamictable', 'types.hdmf_common.DynamicTable', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'categories',[]); + misc.parseSkipInvalidName(p, varargin); + obj.categories = p.Results.categories; + if strcmp(class(obj), 'types.hdmf_common.AlignedDynamicTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + if strcmp(class(obj), 'types.hdmf_common.AlignedDynamicTable') + types.util.dynamictable.checkConfig(obj); + end + end + %% SETTERS + function set.categories(obj, val) + obj.categories = obj.validate_categories(val); + end + function set.dynamictable(obj, val) + obj.dynamictable = obj.validate_dynamictable(val); + end + %% VALIDATORS + + function val = validate_categories(obj, val) + val = types.util.checkDtype('categories', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_dynamictable(obj, val) + namedprops = struct(); + constrained = {'types.hdmf_common.DynamicTable'}; + types.util.checkSet('dynamictable', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.DynamicTable(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/categories'], obj.categories, 
'forceArray'); + if ~isempty(obj.dynamictable) + refs = obj.dynamictable.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/CSRMatrix.m b/+types/+hdmf_common/CSRMatrix.m new file mode 100644 index 00000000..64e9b4d5 --- /dev/null +++ b/+types/+hdmf_common/CSRMatrix.m @@ -0,0 +1,137 @@ +classdef CSRMatrix < types.hdmf_common.Container & types.untyped.GroupClass +% CSRMATRIX A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. + + +% REQUIRED PROPERTIES +properties + data; % REQUIRED (any) The non-zero values in the matrix. + indices; % REQUIRED (uint) The column indices. + indptr; % REQUIRED (uint) The row index pointer. +end +% OPTIONAL PROPERTIES +properties + shape; % (uint) The shape (number of rows, number of columns) of this sparse matrix. +end + +methods + function obj = CSRMatrix(varargin) + % CSRMATRIX Constructor for CSRMatrix + obj = obj@types.hdmf_common.Container(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'indices',[]); + addParameter(p, 'indptr',[]); + addParameter(p, 'shape',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.indices = p.Results.indices; + obj.indptr = p.Results.indptr; + obj.shape = p.Results.shape; + if strcmp(class(obj), 'types.hdmf_common.CSRMatrix') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.data(obj, val) + obj.data = obj.validate_data(val); + end + function set.indices(obj, val) + obj.indices = obj.validate_indices(val); + end + function set.indptr(obj, val) + obj.indptr = obj.validate_indptr(val); + end + function 
set.shape(obj, val) + obj.shape = obj.validate_shape(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + + end + function val = validate_indices(obj, val) + val = types.util.checkDtype('indices', 'uint', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_indptr(obj, val) + val = types.util.checkDtype('indptr', 'uint', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_shape(obj, val) + val = types.util.checkDtype('shape', 'uint', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Container(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if startsWith(class(obj.data), 'types.untyped.') + refs = obj.data.export(fid, [fullpath '/data'], refs); + elseif ~isempty(obj.data) + io.writeDataset(fid, [fullpath '/data'], obj.data, 'forceArray'); + end + if startsWith(class(obj.indices), 'types.untyped.') + refs = obj.indices.export(fid, [fullpath '/indices'], refs); + elseif ~isempty(obj.indices) + io.writeDataset(fid, [fullpath '/indices'], obj.indices, 
'forceArray'); + end + if startsWith(class(obj.indptr), 'types.untyped.') + refs = obj.indptr.export(fid, [fullpath '/indptr'], refs); + elseif ~isempty(obj.indptr) + io.writeDataset(fid, [fullpath '/indptr'], obj.indptr, 'forceArray'); + end + io.writeAttribute(fid, [fullpath '/shape'], obj.shape, 'forceArray'); + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/Container.m b/+types/+hdmf_common/Container.m new file mode 100644 index 00000000..b0075373 --- /dev/null +++ b/+types/+hdmf_common/Container.m @@ -0,0 +1,28 @@ +classdef Container < types.untyped.MetaClass & types.untyped.GroupClass +% CONTAINER An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. + + + +methods + function obj = Container(varargin) + % CONTAINER Constructor for Container + obj = obj@types.untyped.MetaClass(varargin{:}); + if strcmp(class(obj), 'types.hdmf_common.Container') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.untyped.MetaClass(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/Data.m b/+types/+hdmf_common/Data.m new file mode 100644 index 00000000..a53a8e72 --- /dev/null +++ b/+types/+hdmf_common/Data.m @@ -0,0 +1,45 @@ +classdef Data < types.untyped.MetaClass & types.untyped.DatasetClass +% DATA An abstract data type for a dataset. 
+ + +% REQUIRED PROPERTIES +properties + data; % REQUIRED any +end + +methods + function obj = Data(varargin) + % DATA Constructor for Data + obj = obj@types.untyped.MetaClass(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.hdmf_common.Data') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.data(obj, val) + obj.data = obj.validate_data(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.untyped.MetaClass(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/DynamicTable.m b/+types/+hdmf_common/DynamicTable.m new file mode 100644 index 00000000..80bc4d01 --- /dev/null +++ b/+types/+hdmf_common/DynamicTable.m @@ -0,0 +1,132 @@ +classdef DynamicTable < types.hdmf_common.Container & types.untyped.GroupClass +% DYNAMICTABLE A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. 
Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. + + +% REQUIRED PROPERTIES +properties + id; % REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. +end +% OPTIONAL PROPERTIES +properties + colnames; % (char) The names of the columns in this table. This should be used to specify an order to the columns. + description; % (char) Description of what is in this dynamic table. + vectordata; % (VectorData) Vector columns, including index columns, of this dynamic table. 
+end + +methods + function obj = DynamicTable(varargin) + % DYNAMICTABLE Constructor for DynamicTable + obj = obj@types.hdmf_common.Container(varargin{:}); + [obj.vectordata, ivarargin] = types.util.parseConstrained(obj,'vectordata', 'types.hdmf_common.VectorData', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'colnames',[]); + addParameter(p, 'description',[]); + addParameter(p, 'id',[]); + misc.parseSkipInvalidName(p, varargin); + obj.colnames = p.Results.colnames; + obj.description = p.Results.description; + obj.id = p.Results.id; + if strcmp(class(obj), 'types.hdmf_common.DynamicTable') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.colnames(obj, val) + obj.colnames = obj.validate_colnames(val); + end + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.id(obj, val) + obj.id = obj.validate_id(val); + end + function set.vectordata(obj, val) + obj.vectordata = obj.validate_vectordata(val); + end + %% VALIDATORS + + function val = validate_colnames(obj, val) + val = types.util.checkDtype('colnames', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = 
size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_id(obj, val) + val = types.util.checkDtype('id', 'types.hdmf_common.ElementIdentifiers', val); + end + function val = validate_vectordata(obj, val) + constrained = { 'types.hdmf_common.VectorData' }; + types.util.checkSet('vectordata', struct(), constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Container(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/colnames'], obj.colnames, 'forceArray'); + io.writeAttribute(fid, [fullpath '/description'], obj.description); + refs = obj.id.export(fid, [fullpath '/id'], refs); + if ~isempty(obj.vectordata) + refs = obj.vectordata.export(fid, fullpath, refs); + end + end + %% TABLE METHODS + function addRow(obj, varargin) + types.util.dynamictable.addRow(obj, varargin{:}); + end + + function addColumn(obj, varargin) + types.util.dynamictable.addColumn(obj, varargin{:}); + end + + function row = getRow(obj, id, varargin) + row = types.util.dynamictable.getRow(obj, id, varargin{:}); + end + + function table = toTable(obj, varargin) + table = types.util.dynamictable.nwbToTable(obj, varargin{:}); + end + + function clear(obj) + types.util.dynamictable.clear(obj); + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/DynamicTableRegion.m b/+types/+hdmf_common/DynamicTableRegion.m new file mode 100644 index 00000000..e24dc59e --- /dev/null +++ b/+types/+hdmf_common/DynamicTableRegion.m @@ -0,0 +1,88 @@ +classdef DynamicTableRegion < types.hdmf_common.VectorData & types.untyped.DatasetClass +% DYNAMICTABLEREGION DynamicTableRegion provides a link from one table to an index or region of another. 
The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. + + +% OPTIONAL PROPERTIES +properties + table; % (Object Reference to DynamicTable) Reference to the DynamicTable object that this region applies to. +end + +methods + function obj = DynamicTableRegion(varargin) + % DYNAMICTABLEREGION Constructor for DynamicTableRegion + obj = obj@types.hdmf_common.VectorData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'description',[]); + addParameter(p, 'table',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.description = p.Results.description; + obj.table = p.Results.table; + if strcmp(class(obj), 'types.hdmf_common.DynamicTableRegion') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.table(obj, val) + obj.table = obj.validate_table(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'int8', val); + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + 
end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_table(obj, val) + % Reference to type `DynamicTable` + val = types.util.checkDtype('table', 'types.untyped.ObjectView', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.VectorData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/table'], obj.table); + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/ElementIdentifiers.m b/+types/+hdmf_common/ElementIdentifiers.m new file mode 100644 index 00000000..d9bd22b0 --- /dev/null +++ b/+types/+hdmf_common/ElementIdentifiers.m @@ -0,0 +1,40 @@ +classdef ElementIdentifiers < types.hdmf_common.Data & types.untyped.DatasetClass +% ELEMENTIDENTIFIERS A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. 
+ + + +methods + function obj = ElementIdentifiers(varargin) + % ELEMENTIDENTIFIERS Constructor for ElementIdentifiers + obj = obj@types.hdmf_common.Data(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + if strcmp(class(obj), 'types.hdmf_common.ElementIdentifiers') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'int8', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Data(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/SimpleMultiContainer.m b/+types/+hdmf_common/SimpleMultiContainer.m new file mode 100644 index 00000000..fbc4746b --- /dev/null +++ b/+types/+hdmf_common/SimpleMultiContainer.m @@ -0,0 +1,63 @@ +classdef SimpleMultiContainer < types.hdmf_common.Container & types.untyped.GroupClass +% SIMPLEMULTICONTAINER A simple Container for holding onto multiple containers. + + +% OPTIONAL PROPERTIES +properties + container; % (Container) Container objects held within this SimpleMultiContainer. + data; % (Data) Data objects held within this SimpleMultiContainer. 
+end + +methods + function obj = SimpleMultiContainer(varargin) + % SIMPLEMULTICONTAINER Constructor for SimpleMultiContainer + obj = obj@types.hdmf_common.Container(varargin{:}); + [obj.container, ivarargin] = types.util.parseConstrained(obj,'container', 'types.hdmf_common.Container', varargin{:}); + varargin(ivarargin) = []; + [obj.data, ivarargin] = types.util.parseConstrained(obj,'data', 'types.hdmf_common.Data', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.hdmf_common.SimpleMultiContainer') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.container(obj, val) + obj.container = obj.validate_container(val); + end + function set.data(obj, val) + obj.data = obj.validate_data(val); + end + %% VALIDATORS + + function val = validate_container(obj, val) + namedprops = struct(); + constrained = {'types.hdmf_common.Container'}; + types.util.checkSet('container', namedprops, constrained, val); + end + function val = validate_data(obj, val) + constrained = { 'types.hdmf_common.Data' }; + types.util.checkSet('data', struct(), constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Container(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.container) + refs = obj.container.export(fid, fullpath, refs); + end + if ~isempty(obj.data) + refs = obj.data.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/VectorData.m b/+types/+hdmf_common/VectorData.m new file mode 100644 index 00000000..cc269845 --- /dev/null +++ b/+types/+hdmf_common/VectorData.m @@ -0,0 +1,135 @@ +classdef VectorData < types.hdmf_common.Data & 
types.untyped.DatasetClass +% VECTORDATA An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + + +% HIDDEN READONLY PROPERTIES +properties(Hidden, SetAccess = protected) + unit; % (char) NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. The value must be 'volts' +end +% HIDDEN PROPERTIES +properties(Hidden) + resolution; % (double) NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. + sampling_rate; % (single) NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. Must be Hertz +end +% OPTIONAL PROPERTIES +properties + description; % (char) Description of what these vectors represent. 
+end + +methods + function obj = VectorData(varargin) + % VECTORDATA Constructor for VectorData + varargin = [{'unit' 'volts'} varargin]; + obj = obj@types.hdmf_common.Data(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'description',[]); + addParameter(p, 'resolution',[]); + addParameter(p, 'sampling_rate',[]); + addParameter(p, 'unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.description = p.Results.description; + obj.resolution = p.Results.resolution; + obj.sampling_rate = p.Results.sampling_rate; + obj.unit = p.Results.unit; + if strcmp(class(obj), 'types.hdmf_common.VectorData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.description(obj, val) + obj.description = obj.validate_description(val); + end + function set.resolution(obj, val) + obj.resolution = obj.validate_resolution(val); + end + function set.sampling_rate(obj, val) + obj.sampling_rate = obj.validate_sampling_rate(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + end + function val = validate_description(obj, val) + val = types.util.checkDtype('description', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_resolution(obj, val) + val = types.util.checkDtype('resolution', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = 
[size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_sampling_rate(obj, val) + val = types.util.checkDtype('sampling_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Data(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/description'], obj.description); + if ~isempty(obj.resolution) && any(endsWith(fullpath, 'units/spike_times')) + io.writeAttribute(fid, [fullpath '/resolution'], obj.resolution); + end + validDataSamplingPaths = strcat('units/', {'waveform_mean', 'waveform_sd', 'waveforms'}); + if ~isempty(obj.sampling_rate) && any(endsWith(fullpath, validDataSamplingPaths)) + io.writeAttribute(fid, [fullpath '/sampling_rate'], obj.sampling_rate); + end + validUnitPaths = strcat('units/', {'waveform_mean', 'waveform_sd', 'waveforms'}); + if ~isempty(obj.unit) && any(endsWith(fullpath, validUnitPaths)) + io.writeAttribute(fid, [fullpath '/unit'], obj.unit); + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/VectorIndex.m b/+types/+hdmf_common/VectorIndex.m new file mode 100644 index 00000000..75a9e683 --- /dev/null +++ b/+types/+hdmf_common/VectorIndex.m @@ -0,0 +1,68 @@ +classdef VectorIndex < types.hdmf_common.VectorData & types.untyped.DatasetClass +% VECTORINDEX Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. 
The name of the VectorIndex is expected to be the name of the target VectorData object followed by "_index". + + +% OPTIONAL PROPERTIES +properties + target; % (Object Reference to VectorData) Reference to the target dataset that this index applies to. +end + +methods + function obj = VectorIndex(varargin) + % VECTORINDEX Constructor for VectorIndex + obj = obj@types.hdmf_common.VectorData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'target',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.target = p.Results.target; + if strcmp(class(obj), 'types.hdmf_common.VectorIndex') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.target(obj, val) + obj.target = obj.validate_target(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'uint8', val); + end + function val = validate_target(obj, val) + % Reference to type `VectorData` + val = types.util.checkDtype('target', 'types.untyped.ObjectView', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.VectorData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, [fullpath '/target'], obj.target); + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_experimental/EnumData.m b/+types/+hdmf_experimental/EnumData.m new file mode 100644 index 00000000..1608dea0 --- 
/dev/null +++ b/+types/+hdmf_experimental/EnumData.m @@ -0,0 +1,68 @@ +classdef EnumData < types.hdmf_common.VectorData & types.untyped.DatasetClass +% ENUMDATA Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. + + +% OPTIONAL PROPERTIES +properties + elements; % (Object Reference to VectorData) Reference to the VectorData object that contains the enumerable elements +end + +methods + function obj = EnumData(varargin) + % ENUMDATA Constructor for EnumData + obj = obj@types.hdmf_common.VectorData(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'elements',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.elements = p.Results.elements; + if strcmp(class(obj), 'types.hdmf_experimental.EnumData') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.elements(obj, val) + obj.elements = obj.validate_elements(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'uint8', val); + end + function val = validate_elements(obj, val) + % Reference to type `VectorData` + val = types.util.checkDtype('elements', 'types.untyped.ObjectView', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.VectorData(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + io.writeAttribute(fid, 
[fullpath '/elements'], obj.elements); + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_experimental/ExternalResources.m b/+types/+hdmf_experimental/ExternalResources.m new file mode 100644 index 00000000..ee018fdf --- /dev/null +++ b/+types/+hdmf_experimental/ExternalResources.m @@ -0,0 +1,87 @@ +classdef ExternalResources < types.hdmf_common.Container & types.untyped.GroupClass +% EXTERNALRESOURCES A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version. + + +% REQUIRED PROPERTIES +properties + entities; % REQUIRED (Data) A table for mapping user terms (i.e., keys) to resource entities. + keys; % REQUIRED (Data) A table for storing user terms that are used to refer to external resources. + object_keys; % REQUIRED (Data) A table for identifying which objects use which keys. + objects; % REQUIRED (Data) A table for identifying which objects in a file contain references to external resources. + resources; % REQUIRED (Data) A table for mapping user terms (i.e., keys) to resource entities. 
+end + +methods + function obj = ExternalResources(varargin) + % EXTERNALRESOURCES Constructor for ExternalResources + obj = obj@types.hdmf_common.Container(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'entities',[]); + addParameter(p, 'keys',[]); + addParameter(p, 'object_keys',[]); + addParameter(p, 'objects',[]); + addParameter(p, 'resources',[]); + misc.parseSkipInvalidName(p, varargin); + obj.entities = p.Results.entities; + obj.keys = p.Results.keys; + obj.object_keys = p.Results.object_keys; + obj.objects = p.Results.objects; + obj.resources = p.Results.resources; + if strcmp(class(obj), 'types.hdmf_experimental.ExternalResources') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.entities(obj, val) + obj.entities = obj.validate_entities(val); + end + function set.keys(obj, val) + obj.keys = obj.validate_keys(val); + end + function set.object_keys(obj, val) + obj.object_keys = obj.validate_object_keys(val); + end + function set.objects(obj, val) + obj.objects = obj.validate_objects(val); + end + function set.resources(obj, val) + obj.resources = obj.validate_resources(val); + end + %% VALIDATORS + + function val = validate_entities(obj, val) + val = types.util.checkDtype('entities', 'types.hdmf_common.Data', val); + end + function val = validate_keys(obj, val) + val = types.util.checkDtype('keys', 'types.hdmf_common.Data', val); + end + function val = validate_object_keys(obj, val) + val = types.util.checkDtype('object_keys', 'types.hdmf_common.Data', val); + end + function val = validate_objects(obj, val) + val = types.util.checkDtype('objects', 'types.hdmf_common.Data', val); + end + function val = validate_resources(obj, val) + val = types.util.checkDtype('resources', 'types.hdmf_common.Data', val); + end + %% EXPORT + function refs = 
export(obj, fid, fullpath, refs) + refs = export@types.hdmf_common.Container(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.entities.export(fid, [fullpath '/entities'], refs); + refs = obj.keys.export(fid, [fullpath '/keys'], refs); + refs = obj.object_keys.export(fid, [fullpath '/object_keys'], refs); + refs = obj.objects.export(fid, [fullpath '/objects'], refs); + refs = obj.resources.export(fid, [fullpath '/resources'], refs); + end +end + +end \ No newline at end of file diff --git a/.gitignore b/.gitignore index 02aa82eb..988501fa 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,3 @@ -/+types/ -!/+types/+untyped/ -!/+types/+util/ -!/+types/+untyped/@DataStub testResults.xml coverage.xml .ropeproject diff --git a/README.md b/README.md index 711202c9..9fad1343 100644 --- a/README.md +++ b/README.md @@ -26,12 +26,11 @@ The returned NwbFile object provides an in-memory view of the underlying NWB dat ### Step 2b: Writing a NWB File -Writing a NWB file requires first generating the class files that you will need (or an environment from a previous `nwbRead`). -From the MATLAB command line, add MatNWB to the path and generate the core classes for the most recent NWB schema. The generated classes are normally placed in the `+types` subdirectory in the MatNWB installation directory. As MATLAB [packages](https://www.mathworks.com/help/matlab/matlab_oop/scoping-classes-with-packages.html), these generated classes comprise the building blocks you will need to write your NWB file. + +From the MATLAB command line, add MatNWB to the path. The generated classes are normally placed in the `+types` subdirectory in the MatNWB installation directory. As MATLAB [packages](https://www.mathworks.com/help/matlab/matlab_oop/scoping-classes-with-packages.html), these generated classes comprise the building blocks you will need to write your NWB file. 
```matlab addpath('path/to/matnwb'); -generateCore(); % generate the most recent nwb-schema release. ``` Once you have configured your NWB File, you may write the `NwbFile` object to disk using the `nwbExport` function. diff --git a/nwbClearGenerated.m b/nwbClearGenerated.m new file mode 100644 index 00000000..0294c30b --- /dev/null +++ b/nwbClearGenerated.m @@ -0,0 +1,11 @@ +function nwbClearGenerated() + %% NWBCLEARGENERATED clears generated class files. + nwbDir = misc.getMatnwbDir(); + typesPath = fullfile(nwbDir, '+types'); + listing = dir(typesPath); + moduleNames = setdiff({listing.name}, {'+untyped', '+util', '.', '..'}); + generatedPaths = fullfile(typesPath, moduleNames); + for i=1:length(generatedPaths) + rmdir(generatedPaths{i}, 's'); + end +end \ No newline at end of file diff --git a/nwbtest.m b/nwbtest.m index 44e9cd32..cf6feca8 100644 --- a/nwbtest.m +++ b/nwbtest.m @@ -44,6 +44,7 @@ ws = pwd; + nwbClearGenerated(); % clear default files if any. pvcell = struct2pvcell(parser.Unmatched); suite = TestSuite.fromPackage('tests', 'IncludingSubpackages', true, pvcell{:}); @@ -56,7 +57,7 @@ [installDir, ~, ~] = fileparts(mfilename('fullpath')); ignoreFolders = {'tutorials', '+contrib', '+util', 'external_packages', '+tests'}; - ignorePaths = {fullfile('+misc', 'generateDocs.m'), [mfilename '.m']}; + ignorePaths = {fullfile('+misc', 'generateDocs.m'), [mfilename '.m'], 'nwbClearGenerated.m'}; mfilePaths = getMfilePaths(installDir, ignoreFolders, ignorePaths); if ~verLessThan('matlab', '9.3') && ~isempty(mfilePaths) runner.addPlugin(CodeCoveragePlugin.forFile(mfilePaths,... 
diff --git a/tutorials/dynamic_tables.mlx b/tutorials/dynamic_tables.mlx index 2898c475..58d471c8 100644 Binary files a/tutorials/dynamic_tables.mlx and b/tutorials/dynamic_tables.mlx differ diff --git a/tutorials/html/dynamic_tables.html b/tutorials/html/dynamic_tables.html index 857d892e..3eaab62e 100644 --- a/tutorials/html/dynamic_tables.html +++ b/tutorials/html/dynamic_tables.html @@ -1,5 +1,5 @@ -DynamicTables Tutorial

Introduction to MatNWB

Table of Contents
Installing MatNWB +.embeddedOutputsVariableElement.rightPaneElement { min-height: 16px;} +.rightPaneElement .variableElement { padding-left: 9px;} +.outputsOnRight .embeddedOutputsVariableElement.rightPaneElement .eoOutputContent { /* Remove extra space allocated for navigation border */ margin-top: 0; margin-bottom: 0;} +.variableNameElement { margin-bottom: 3px; display: inline-block;} +/* * Ellipses as base64 for HTML export. */.matrixElement .horizontalEllipsis,.rtcDataTipElement .matrixElement .horizontalEllipsis { display: inline-block; margin-top: 3px; /* base64 encoded version of images-liveeditor/HEllipsis.png */ width: 30px; height: 12px; background-repeat: no-repeat; background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB0AAAAJCAYAAADO1CeCAAAAJUlEQVR42mP4//8/A70xw0i29BUDFPxnAEtTW37wWDqakIa4pQDvOOG89lHX2gAAAABJRU5ErkJggg==");} +.matrixElement .verticalEllipsis,.textElement .verticalEllipsis,.rtcDataTipElement .matrixElement .verticalEllipsis,.rtcDataTipElement .textElement .verticalEllipsis { margin-left: 35px; /* base64 encoded version of images-liveeditor/VEllipsis.png */ width: 12px; height: 30px; background-repeat: no-repeat; background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAoAAAAZCAYAAAAIcL+IAAAALklEQVR42mP4//8/AzGYgWyFMECMwv8QddRS+P//KyimlmcGUOFoOI6GI/UVAgDnd8Dd4+NCwgAAAABJRU5ErkJggg==");}

Introduction to MatNWB

Installing MatNWB

Use the code below within the brackets to install MatNWB from source. MatNWB works by automatically creating API classes based on the schema. Use generateCore to generate these classes.
%{
!git clone https://github.com/NeurodataWithoutBorders/matnwb.git
cd matnwb
addpath(genpath(pwd));
generateCore();
%}

Set up the NWB file

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata using the NwbFile command. For all MatNWB classes and functions, we use the MATLAB method of entering keyword argument pairs, where arguments are entered as name followed by value. Ellipses are used for clarity.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties: +Next steps

Installing MatNWB

Use the code below within the brackets to install MatNWB from source. MatNWB works by automatically creating API classes based on the schema.
%{
!git clone https://github.com/NeurodataWithoutBorders/matnwb.git
addpath(genpath(pwd));
%}

Set up the NWB file

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata using the NwbFile command. For all MatNWB classes and functions, we use the MATLAB method of entering keyword argument pairs, where arguments are entered as name followed by value. Ellipses are used for clarity.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties: nwb_version: '2.4.0' acquisition: [0×1 types.untyped.Set] @@ -121,7 +127,7 @@ stimulus_templates: [0×1 types.untyped.Set] timestamps_reference_time: [] units: [] -

Subject information

You can also provide information about your subject in the NWB file. Create a Subject object to store information such as age, species, genotype, sex, and a freeform description. Then set nwb.general_subject to the Subject object.
Each of these fields is free-form, so any values will be valid, but here are our recommendations:
  • For age, we recommend using the ISO 8601 Duration format
  • For species, we recommend using the formal Latin binomial name (e.g. mouse -> Mus musculus, human -> Homo sapiens)
  • For sex, we recommend using F (female), M (male), U (unknown), and O (other)
subject = types.core.Subject( ...
'subject_id', '001', ...
'age', 'P90D', ...
'description', 'mouse 5', ...
'species', 'Mus musculus', ...
'sex', 'M' ...
);
nwb.general_subject = subject;
subject
subject =
Subject with properties: +

Subject information

You can also provide information about your subject in the NWB file. Create a Subject object to store information such as age, species, genotype, sex, and a freeform description. Then set nwb.general_subject to the Subject object.
Each of these fields is free-form, so any values will be valid, but here are our recommendations:
  • For age, we recommend using the ISO 8601 Duration format
  • For species, we recommend using the formal Latin binomial name (e.g. mouse -> Mus musculus, human -> Homo sapiens)
  • For sex, we recommend using F (female), M (male), U (unknown), and O (other)
subject = types.core.Subject( ...
'subject_id', '001', ...
'age', 'P90D', ...
'description', 'mouse 5', ...
'species', 'Mus musculus', ...
'sex', 'M' ...
);
nwb.general_subject = subject;
 
subject
subject =
Subject with properties: age: 'P90D' date_of_birth: [] @@ -132,7 +138,7 @@ strain: [] subject_id: '001' weight: [] -
Note: the DANDI archive requires all NWB files to have a subject object with subject_id specified, and strongly encourages specifying the other fields.

Behavior

SpatialSeries and Position

Many types of data have special data types in NWB. To store the spatial position of a subject, we will use the SpatialSeries and Position classes.
Note: These diagrams follow a standard convention called "UML class diagram" to express the object-oriented relationships between NWB classes. For our purposes, all you need to know is that an open triangle means "extends" and an open diamond means "is contained within." Learn more about class diagrams on the wikipedia page.
SpatialSeries is a subclass of TimeSeries, a common base class for measurements sampled over time, and provides fields for data and time (regularly or irregularly sampled). Here, we put a SpatialSeries object called 'SpatialSeries' in a Position object.
% create SpatialSeries object
spatial_series_ts = types.core.SpatialSeries( ...
'data', [linspace(0,10,100); linspace(0,8,100)], ...
'reference_frame', '(0,0) is bottom left corner', ...
'timestamps', linspace(0, 100)/200 ...
);
% create Position object and add SpatialSeries
Position = types.core.Position('SpatialSeries', spatial_series_ts);
% create processing module
behavior_mod = types.core.ProcessingModule( 'description', 'contains behavioral data');
% add the Position object (that holds the SpatialSeries object)
behavior_mod.nwbdatainterface.set('Position', Position);
NWB differentiates between raw, acquired data, which should never change, and processed data, which are the results of preprocessing algorithms and could change. Let's assume that the animal's position was computed from a video tracking algorithm, so it would be classified as processed data. Since processed data can be very diverse, NWB allows us to create processing modules, which are like folders, to store related processed data or data that comes from a single algorithm.
Create a processing module called "behavior" for storing behavioral data in the NWBFile and add the Position object to the module.
% create processing module
behavior_mod = types.core.ProcessingModule('description', 'contains behavioral data');
% add the Position object (that holds the SpatialSeries object) to the
% module and name the Position object "Position"
behavior_mod.nwbdatainterface.set('Position', Position);
% add the processing module to the NWBFile object, and name the processing module "behavior"
nwb.processing.set('behavior', behavior_mod);

Trials

Trials are stored in a TimeIntervals object which is a subclass of DynamicTable. DynamicTable objects are used to store tabular metadata throughout NWB, including for trials, electrodes, and sorted units. They offer flexibility for tabular data by allowing required columns, optional columns, and custom columns.
The trials DynamicTable can be thought of as a table with this structure:
Trials are stored in a TimeIntervals object which subclasses DynamicTable. Here, we are adding 'correct', which will be a logical array.
trials = types.core.TimeIntervals( ...
'colnames', {'start_time', 'stop_time', 'correct'}, ...
'description', 'trial data and properties', ...
'id', types.hdmf_common.ElementIdentifiers('data', 0:2), ...
'start_time', types.hdmf_common.VectorData( ...
'data', [0.1, 1.5, 2.5], ...
'description','start time of trial in seconds' ...
), ...
'stop_time', types.hdmf_common.VectorData( ...
'data', [1.0, 2.0, 3.0], ...
'description','end of each trial in seconds' ...
), ...
'correct', types.hdmf_common.VectorData( ...
'data', [false, true, false], ...
'description', 'whether the trial was correct') ...
);
nwb.intervals_trials = trials;

Write

Now, to write the NWB file that we have built so far:
nwbExport(nwb, 'intro_tutorial.nwb')
We can use the HDFView application to inspect the resulting NWB file.

Read

We can then read the file back in using MatNWB and inspect its contents.
read_nwbfile = nwbRead('intro_tutorial.nwb')
read_nwbfile =
NwbFile with properties: +
Note: the DANDI archive requires all NWB files to have a subject object with subject_id specified, and strongly encourages specifying the other fields.

Behavior

SpatialSeries and Position

Many types of data have special data types in NWB. To store the spatial position of a subject, we will use the SpatialSeries and Position classes.
Note: These diagrams follow a standard convention called "UML class diagram" to express the object-oriented relationships between NWB classes. For our purposes, all you need to know is that an open triangle means "extends" and an open diamond means "is contained within." Learn more about class diagrams on the wikipedia page.
SpatialSeries is a subclass of TimeSeries, a common base class for measurements sampled over time, and provides fields for data and time (regularly or irregularly sampled). Here, we put a SpatialSeries object called 'SpatialSeries' in a Position object.
% create SpatialSeries object
spatial_series_ts = types.core.SpatialSeries( ...
'data', [linspace(0,10,100); linspace(0,8,100)], ...
'reference_frame', '(0,0) is bottom left corner', ...
'timestamps', linspace(0, 100)/200 ...
);
 
% create Position object and add SpatialSeries
Position = types.core.Position('SpatialSeries', spatial_series_ts);
 
% create processing module
behavior_mod = types.core.ProcessingModule('description', 'contains behavioral data');
 
% add the Position object (that holds the SpatialSeries object)
behavior_mod.nwbdatainterface.set('Position', Position);
NWB differentiates between raw, acquired data, which should never change, and processed data, which are the results of preprocessing algorithms and could change. Let's assume that the animal's position was computed from a video tracking algorithm, so it would be classified as processed data. Since processed data can be very diverse, NWB allows us to create processing modules, which are like folders, to store related processed data or data that comes from a single algorithm.
Create a processing module called "behavior" for storing behavioral data in the NWBFile and add the Position object to the module.
% create processing module
behavior_mod = types.core.ProcessingModule('description', 'contains behavioral data');
 
% add the Position object (that holds the SpatialSeries object) to the
% module and name the Position object "Position"
behavior_mod.nwbdatainterface.set('Position', Position);
 
% add the processing module to the NWBFile object, and name the processing module "behavior"
nwb.processing.set('behavior', behavior_mod);

Trials

Trials are stored in a TimeIntervals object which is a subclass of DynamicTable. DynamicTable objects are used to store tabular metadata throughout NWB, including for trials, electrodes, and sorted units. They offer flexibility for tabular data by allowing required columns, optional columns, and custom columns.
The trials DynamicTable can be thought of as a table with this structure:
Trials are stored in a TimeIntervals object which subclasses DynamicTable. Here, we are adding 'correct', which will be a logical array.
trials = types.core.TimeIntervals( ...
'colnames', {'start_time', 'stop_time', 'correct'}, ...
'description', 'trial data and properties', ...
'id', types.hdmf_common.ElementIdentifiers('data', 0:2), ...
'start_time', types.hdmf_common.VectorData( ...
'data', [0.1, 1.5, 2.5], ...
'description','start time of trial in seconds' ...
), ...
'stop_time', types.hdmf_common.VectorData( ...
'data', [1.0, 2.0, 3.0], ...
'description','end of each trial in seconds' ...
), ...
'correct', types.hdmf_common.VectorData( ...
'data', [false, true, false], ...
'description', 'whether the trial was correct') ...
);
nwb.intervals_trials = trials;

Write

Now, to write the NWB file that we have built so far:
nwbExport(nwb, 'intro_tutorial.nwb')
We can use the HDFView application to inspect the resulting NWB file.

Read

We can then read the file back in using MatNWB and inspect its contents.
read_nwbfile = nwbRead('intro_tutorial.nwb', 'ignorecache')
read_nwbfile =
NwbFile with properties: nwb_version: '2.4.0' acquisition: [0×1 types.untyped.Set] @@ -183,7 +189,7 @@ stimulus_templates: [0×1 types.untyped.Set] timestamps_reference_time: 2018-04-25T02:30:03.000000-04:00 units: [] -
We can print the SpatialSeries data by traversing the hierarchy of objects. The processing module called 'behavior' contains our Position object named 'Position'. The Position object contains our SpatialSeries object named 'SpatialSeries'.
read_spatial_series = read_nwbfile.processing.get('behavior'). ...
nwbdatainterface.get('Position').spatialseries.get('SpatialSeries')
read_spatial_series =
SpatialSeries with properties: +
We can print the SpatialSeries data by traversing the hierarchy of objects. The processing module called 'behavior' contains our Position object named 'Position'. The Position object contains our SpatialSeries object named 'SpatialSeries'.
read_spatial_series = read_nwbfile.processing.get('behavior'). ...
nwbdatainterface.get('Position').spatialseries.get('SpatialSeries')
read_spatial_series =
SpatialSeries with properties: reference_frame: '(0,0) is bottom left corner' starting_time_unit: 'seconds' @@ -201,31 +207,28 @@ starting_time: [] starting_time_rate: [] timestamps: [1×1 types.untyped.DataStub] -

Reading data

Counter to normal MATLAB workflow, data arrays are read passively from the file. Calling read_spatial_series.data does not read the data values, but presents a DataStub object that can be indexed to read data.
read_spatial_series.data
ans =
DataStub with properties: +

Reading data

Counter to normal MATLAB workflow, data arrays are read passively from the file. Calling read_spatial_series.data does not read the data values, but presents a DataStub object that can be indexed to read data.
read_spatial_series.data
ans =
DataStub with properties: filename: 'intro_tutorial.nwb' path: '/processing/behavior/Position/SpatialSeries/data' dims: [2 100] ndims: 2 -
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access all the data in the matrix using the load method with no arguments.
read_spatial_series.data.load
ans = 2×100
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 1.0101 1.1111 1.2121 1.3131 1.4141 1.5152 1.6162 1.7172 1.8182 1.9192 2.0202 2.1212 2.2222 2.3232 2.4242 2.5253 2.6263 2.7273 2.8283 2.9293 3.0303 3.1313 3.2323 3.3333 3.4343 3.5354 3.6364 3.7374 3.8384 3.9394 4.0404 4.1414 4.2424 4.3434 4.4444 4.5455 4.6465 4.7475 4.8485 4.9495 +
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access all the data in the matrix using the load method with no arguments.
read_spatial_series.data.load
ans = 2×100
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 1.0101 1.1111 1.2121 1.3131 1.4141 1.5152 1.6162 1.7172 1.8182 1.9192 2.0202 2.1212 2.2222 2.3232 2.4242 2.5253 2.6263 2.7273 2.8283 2.9293 3.0303 3.1313 3.2323 3.3333 3.4343 3.5354 3.6364 3.7374 3.8384 3.9394 4.0404 4.1414 4.2424 4.3434 4.4444 4.5455 4.6465 4.7475 4.8485 4.9495 0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273 0.8081 0.8889 0.9697 1.0505 1.1313 1.2121 1.2929 1.3737 1.4545 1.5354 1.6162 1.6970 1.7778 1.8586 1.9394 2.0202 2.1010 2.1818 2.2626 2.3434 2.4242 2.5051 2.5859 2.6667 2.7475 2.8283 2.9091 2.9899 3.0707 3.1515 3.2323 3.3131 3.3939 3.4747 3.5556 3.6364 3.7172 3.7980 3.8788 3.9596 -
If you only need a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_spatial_series.data(:, 1:10)
ans = 2×10
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 +
If you only need a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_spatial_series.data(:, 1:10)
ans = 2×10
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273 -

Next steps

This concludes the introductory tutorial. Please proceed to one of the specialized tutorials, which are designed to follow this one.
See the API documentation to learn what data types are available.
+

Next steps

This concludes the introductory tutorial. Please proceed to one of the specialized tutorials, which are designed to follow this one.
See the API documentation to learn what data types are available.

\ No newline at end of file +--> +
\ No newline at end of file diff --git a/tutorials/html/scratch.html b/tutorials/html/scratch.html index d2577162..fe467263 100644 --- a/tutorials/html/scratch.html +++ b/tutorials/html/scratch.html @@ -1,5 +1,5 @@ -Scratch Data

Scratch Data

This tutorial will focus on the basics of working with a NWBFile for storing non-standardizable data. For example, you may want to store results from one-off analyses of some temporary utility. NWB provides in-file scratch space as a dedicated location where miscellaneous non-standard data may be written.
Table of Contents

Setup

Let us first set up an environment with some "acquired data".
generateCore('savedir', '.');
ContextFile = NwbFile(...
'session_description', 'demonstrate NWBFile scratch', ... % required
'identifier', 'SCRATCH-0', ... % required
'session_start_time', datetime(2019, 4, 3, 11, 0, 0, 'TimeZone', 'local'), ... % required
'file_create_date', datetime(2019, 4, 15, 12, 0, 0, 'TimeZone', 'local'), ... % optional
'general_experimenter', 'Niu, Lawrence', ...
'general_institution', 'NWB' ...
);
% simulate some data
timestamps = 0:100:1024;
data = sin(0.333 .* timestamps) ...
+ cos(0.1 .* timestamps) ...
+ randn(1, length(timestamps));
RawTs = types.core.TimeSeries(...
'data', data, ...
'data_unit', 'm', ...
'starting_time', 0., ...
'starting_time_rate', 100, ...
'description', 'simulated acquired data' ...
);
ContextFile.acquisition.set('raw_timeseries', RawTs);
 
% "analyze" the simulated data
% we provide a re-implementation of scipy.signal.correlate(..., mode='same')
% Ideally, you should use MATLAB-native code though using its equivalent function (xcorr) requires
% the Signal Processing Toolbox
correlatedData = sameCorr(RawTs.data, ones(128, 1)) ./ 128;
% If you are unsure of how HDF5 paths map to MatNWB property structures, we suggest using HDFView to
% verify. In most cases, MatNWB properties map directly to HDF5 paths.
FilteredTs = types.core.TimeSeries( ...
'data', correlatedData, ...
'data_unit', 'm', ...
'starting_time', 0, ...
'starting_time_rate', 100, ...
'description', 'cross-correlated data' ...
)
FilteredTs =
TimeSeries with properties: +Writing Data to Scratch Space

Setup

Let us first set up an environment with some "acquired data".
ContextFile = NwbFile(...
'session_description', 'demonstrate NWBFile scratch', ... % required
'identifier', 'SCRATCH-0', ... % required
'session_start_time', datetime(2019, 4, 3, 11, 0, 0, 'TimeZone', 'local'), ... % required
'file_create_date', datetime(2019, 4, 15, 12, 0, 0, 'TimeZone', 'local'), ... % optional
'general_experimenter', 'Niu, Lawrence', ...
'general_institution', 'NWB' ...
);
% simulate some data
timestamps = 0:100:1024;
data = sin(0.333 .* timestamps) ...
+ cos(0.1 .* timestamps) ...
+ randn(1, length(timestamps));
RawTs = types.core.TimeSeries(...
'data', data, ...
'data_unit', 'm', ...
'starting_time', 0., ...
'starting_time_rate', 100, ...
'description', 'simulated acquired data' ...
);
ContextFile.acquisition.set('raw_timeseries', RawTs);
 
% "analyze" the simulated data
% we provide a re-implementation of scipy.signal.correlate(..., mode='same')
% Ideally, you should use MATLAB-native code though using its equivalent function (xcorr) requires
% the Signal Processing Toolbox
correlatedData = sameCorr(RawTs.data, ones(128, 1)) ./ 128;
% If you are unsure of how HDF5 paths map to MatNWB property structures, we suggest using HDFView to
% verify. In most cases, MatNWB properties map directly to HDF5 paths.
FilteredTs = types.core.TimeSeries( ...
'data', correlatedData, ...
'data_unit', 'm', ...
'starting_time', 0, ...
'starting_time_rate', 100, ...
'description', 'cross-correlated data' ...
)
FilteredTs =
TimeSeries with properties: starting_time_unit: 'seconds' timestamps_interval: 1 @@ -56,7 +56,7 @@ starting_time: 0 starting_time_rate: 100 timestamps: [] -
ProcModule = types.core.ProcessingModule( ...
'description', 'a module to store filtering results', ...
'filtered_timeseries', FilteredTs ...
);
ContextFile.processing.set('core', ProcModule);
nwbExport(ContextFile, 'context_file.nwb');

Warning Regarding the Usage of Scratch Space

Scratch data written into the scratch space should not be intended for reuse or sharing. Standard NWB types, along with any extensions, should always be used for any data intended to be shared. Published data should not include scratch data and any reuse should not require scratch data for data processing.

Writing Data to Scratch Space

Let us first copy what we need from the processed data file.
ScratchFile = NwbFile('identifier', 'SCRATCH-1');
ContextFile = nwbRead('./context_file.nwb', 'ignorecache');
% again, copy the required metadata from the processed file.
ScratchFile.session_description = ContextFile.session_description;
ScratchFile.session_start_time = ContextFile.session_start_time;
We can now do an analysis lacking specification but that we still wish to store results for.
% ProcessingModule stores its timeseries inside of the "nwbdatainterface" property which is a Set of
% NWBDataInterface objects. This is not directly mapped to the NWB file but is used to distinguish
% it and DynamicTable objects which it stores under the "dynamictable" property.
FilteredTs = ContextFile.processing.get('core').nwbdatainterface.get('filtered_timeseries');
% note: MatNWB does not currently support complex numbers. If you wish to store the data, consider
% storing each number as a struct which will write the data to HDF5 using compound types.
dataFft = real(fft(FilteredTs.data.load()));
ScratchData = types.core.ScratchData( ...
'data', dataFft, ...
'notes', 'discrete Fourier transform from filtered data' ...
)
ScratchData =
ScratchData with properties: +
ProcModule = types.core.ProcessingModule( ...
'description', 'a module to store filtering results', ...
'filtered_timeseries', FilteredTs ...
);
ContextFile.processing.set('core', ProcModule);
nwbExport(ContextFile, 'context_file.nwb');

Warning Regarding the Usage of Scratch Space

Scratch data written into the scratch space should not be intended for reuse or sharing. Standard NWB types, along with any extensions, should always be used for any data intended to be shared. Published data should not include scratch data and any reuse should not require scratch data for data processing.

Writing Data to Scratch Space

Let us first copy what we need from the processed data file.
ScratchFile = NwbFile('identifier', 'SCRATCH-1');
ContextFile = nwbRead('./context_file.nwb', 'ignorecache');
% again, copy the required metadata from the processed file.
ScratchFile.session_description = ContextFile.session_description;
ScratchFile.session_start_time = ContextFile.session_start_time;
We can now do an analysis lacking specification but that we still wish to store results for.
% ProcessingModule stores its timeseries inside of the "nwbdatainterface" property which is a Set of
% NWBDataInterface objects. This is not directly mapped to the NWB file but is used to distinguish
% it and DynamicTable objects which it stores under the "dynamictable" property.
FilteredTs = ContextFile.processing.get('core').nwbdatainterface.get('filtered_timeseries');
% note: MatNWB does not currently support complex numbers. If you wish to store the data, consider
% storing each number as a struct which will write the data to HDF5 using compound types.
dataFft = real(fft(FilteredTs.data.load()));
ScratchData = types.core.ScratchData( ...
'data', dataFft, ...
'notes', 'discrete Fourier transform from filtered data' ...
)
ScratchData =
ScratchData with properties: notes: 'discrete Fourier transform from filtered data' data: [11×1 double] @@ -73,7 +73,6 @@ %% Setup % Let us first set up an environment with some "acquired data". -generateCore('savedir', '.'); ContextFile = NwbFile(... 'session_description', 'demonstrate NWBFile scratch', ... % required 'identifier', 'SCRATCH-0', ... % required diff --git a/tutorials/intro.mlx b/tutorials/intro.mlx index 00c6f791..0bfe9552 100644 Binary files a/tutorials/intro.mlx and b/tutorials/intro.mlx differ diff --git a/tutorials/scratch.mlx b/tutorials/scratch.mlx index e783bc19..88fe567d 100644 Binary files a/tutorials/scratch.mlx and b/tutorials/scratch.mlx differ