diff --git a/CHANGES.rst b/CHANGES.rst
index c4b3970a08..72ae8eb3ab 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,12 @@ New Features
 Cubeviz
 ^^^^^^^
 
+- Automatic spectral extraction now goes through the logic of the spectral extraction plugin for
+  self-consistency. This results in several breaking changes to data-labels and ``get_data``
+  (the extracted spectra are now given dedicated data-labels instead of referring to them by
+  the label of the flux cube) as well as to several plugins: model fitting, gaussian smooth,
+  line analysis, and moment maps. [#2827]
+
 Imviz
 ^^^^^
 
@@ -28,6 +34,14 @@ API Changes
 Cubeviz
 ^^^^^^^
 
+- ``get_data`` no longer supports ``function`` or ``spatial_subset`` as arguments. To access
+  an extracted 1D spectrum, use the Spectral Extraction plugin or the automatic extraction of
+  spatial subsets, and refer to the data-label assigned to the resulting 1D spectrum. [#2827]
+
+- Several plugins that take 1D spectra replace ``spatial_subset`` with referring to the 1D
+  spectrum in ``dataset``. This affects: model fitting, gaussian smooth, line analysis,
+  and moment maps. [#2827]
+
 Imviz
 ^^^^^
 
diff --git a/docs/cubeviz/export_data.rst b/docs/cubeviz/export_data.rst
index fb860fd915..3053ca65e9 100644
--- a/docs/cubeviz/export_data.rst
+++ b/docs/cubeviz/export_data.rst
@@ -17,25 +17,15 @@ Spatial Regions
 :ref:`Export Spatial Regions `
     Documentation on how to export spatial regions.
 
-Since Specviz can be accessed from Cubeviz, the following line of code
-can be used to extract the *spectrum* of a spatial subset named "Subset 1":
+Any cube (or extracted spectrum) can be exported from Cubeviz:
 
 .. code-block:: python
 
-    subset1_spec1d = cubeviz.specviz.get_spectra(spectral_subset="Subset 1")
+    subset1_spec1d = cubeviz.get_data("Spectrum (Subset 1, sum)")
 
-An example without accessing Specviz:
-.. code-block:: python
-
-    subset1_spec1d = cubeviz.get_data(data_label=flux_data_label,
-                                      spatial_subset="Subset 1",
-                                      function="mean")
-
-Note that in the above example, the ``function`` keyword is used to tell Cubeviz
-how to collapse the flux cube down to a one dimensional spectrum - this is not
-necessarily equivalent to the collapsed spectrum in the spectrum viewer, which
-may have used a different collapse function.
+To use a ``function`` other than sum, use the :ref:`Spectral Extraction ` plugin
+first to create a 1D spectrum and then refer to it by label in ``get_data``.
 
 To get all subsets from the spectrum viewer:
 
@@ -58,11 +48,12 @@ To access the spatial regions themselves:
 :ref:`Export Spectra `
     Documentation on how to export data from the ``spectrum-viewer``.
 
-The following line of code can be used to extract a spectral subset named "Subset 2":
+The following line of code can be used to retrieve a 1D spectrum, whether extracted
+automatically or manually through the :ref:`Spectral Extraction ` plugin:
 
 .. code-block:: python
 
-    subset2_spec1d = cubeviz.specviz.get_spectra("Subset 2")
+    subset2_spec1d = cubeviz.get_data("Spectrum (Subset 2, sum)")
 
 3D Data Cubes
 =============
@@ -85,16 +76,6 @@ where the mask (if available) is as defined in
 
     mydata.write("mydata.fits", format="jdaviz-cube")
 
-Data can also be accessed directly from ``data_collection`` using the following code:
-
-.. code-block:: python
-
-    cubeviz.app.data_collection[0]
-
-Which is returned as a `~glue.core.data.Data` object. The
-`~glue.core.data_collection.DataCollection` object
-can be indexed to return all available data (i.e., not just using 0 like in the
-previous example).
 
 
 ..
_cubeviz-export-model: diff --git a/docs/cubeviz/plugins.rst b/docs/cubeviz/plugins.rst index f389521165..55fa3c0276 100644 --- a/docs/cubeviz/plugins.rst +++ b/docs/cubeviz/plugins.rst @@ -292,9 +292,6 @@ Spectral Extraction .. image:: ../img/cubeviz_spectral_extraction.png -.. note:: - - Spectral Extraction requires at least version 5.3.2 of astropy. The Spectral Extraction plugin produces a 1D spectrum from a spectral cube. The 1D spectrum can be computed via the sum, mean, minimum, or diff --git a/jdaviz/app.py b/jdaviz/app.py index 8b60246c98..e65de0270e 100644 --- a/jdaviz/app.py +++ b/jdaviz/app.py @@ -152,7 +152,7 @@ def to_unit(self, data, cid, values, original_units, target_units): eqv = u.spectral_density(spec.spectral_axis) else: # spectral axis - eqv = u.spectral() + eqv = u.spectral() + u.pixel_scale(1*u.pix) return (values * u.Unit(original_units)).to_value(u.Unit(target_units), equivalencies=eqv) @@ -412,6 +412,8 @@ def __init__(self, configuration=None, *args, **kwargs): self._get_object_cache = {} self.hub.subscribe(self, SubsetUpdateMessage, handler=self._on_subset_update_message) + self.hub.subscribe(self, SubsetDeleteMessage, + handler=self._on_subset_delete_message) # Store for associations between Data entries: self._data_associations = self._init_data_associations() @@ -423,8 +425,7 @@ def __init__(self, configuration=None, *args, **kwargs): handler=self._on_layers_changed) self.hub.subscribe(self, SubsetCreateMessage, handler=self._on_layers_changed) - self.hub.subscribe(self, SubsetDeleteMessage, - handler=self._on_layers_changed) + # SubsetDeleteMessage will also call _on_layers_changed via _on_subset_delete_message def _on_plugin_table_added(self, msg): if msg.plugin._plugin_name is None: @@ -433,7 +434,7 @@ def _on_plugin_table_added(self, msg): key = f"{msg.plugin._plugin_name}: {msg.table._table_name}" self._plugin_tables.setdefault(key, msg.table.user_api) - def _update_live_plugin_results(self, trigger_data_lbl=None, trigger_subset=None): + def _iter_live_plugin_results(self, trigger_data_lbl=None, trigger_subset=None): trigger_subset_lbl = trigger_subset.label if trigger_subset is not None else None for data in self.data_collection: plugin_inputs = data.meta.get('_update_live_plugin_results', None) @@ -455,18 +456,34 @@ def _update_live_plugin_results(self, trigger_data_lbl=None, trigger_subset=None for attr in data_subs]): # trigger parent data of subset does not match subscribed data entries continue + yield (data, plugin_inputs) + + def _update_live_plugin_results(self, trigger_data_lbl=None, trigger_subset=None): + for data, plugin_inputs in self._iter_live_plugin_results(trigger_data_lbl, trigger_subset): # update and overwrite data # make a new instance of the plugin to avoid changing any UI settings plg = self._jdaviz_helper.plugins.get(data.meta.get('Plugin'))._obj.new() if not plg.supports_auto_update: raise NotImplementedError(f"{data.meta.get('Plugin')} does not support live-updates") # noqa plg.user_api.from_dict(plugin_inputs) + # keep auto-updating, even if the option is hidden from the user API + # (can remove this line if auto_update is exposed to the user API in the future) + plg.add_results.auto_update_result = True try: plg() except Exception as e: self.hub.broadcast(SnackbarMessage( f"Auto-update for {plugin_inputs['add_results']['label']} failed: {e}", sender=self, color="error")) + # TODO: should we delete the entry (but then any plot options, etc, are lost) + # self.vue_data_item_remove({'item_name': data.label}) + + def 
_remove_live_plugin_results(self, trigger_data_lbl=None, trigger_subset=None): + for data, plugin_inputs in self._iter_live_plugin_results(trigger_data_lbl, trigger_subset): + self.hub.broadcast(SnackbarMessage( + f"Removing {data.label} due to deletion of {trigger_subset.label if trigger_subset is not None else trigger_data_lbl}", # noqa + sender=self, color="warning")) + self.vue_data_item_remove({'item_name': data.label}) def _on_add_data_message(self, msg): self._on_layers_changed(msg) @@ -478,6 +495,10 @@ def _on_subset_update_message(self, msg): if msg.attribute == 'subset_state': self._update_live_plugin_results(trigger_subset=msg.subset) + def _on_subset_delete_message(self, msg): + self._remove_live_plugin_results(trigger_subset=msg.subset) + self._on_layers_changed(msg) + def _on_plugin_plot_added(self, msg): if msg.plugin._plugin_name is None: # plugin was instantiated after the app was created, ignore @@ -2123,7 +2144,14 @@ def set_data_visibility(self, viewer_reference, data_label, visible=True, replac data = self.data_collection[data_label] - viewer.add_data(data, percentile=95, color=viewer.color_cycler()) + # set the original color based on metadata preferences, if provided, and otherwise + # based on the colorcycler + # NOTE: this is intentionally not a single line to avoid incrementing the color-cycler + # unless it is used + color = data.meta.get('_default_color') + if color is None: + color = viewer.color_cycler() + viewer.add_data(data, percentile=95, color=color) # Specviz removes the data from collection in viewer.py if flux unit incompatible. if data_label not in self.data_collection: @@ -2315,13 +2343,6 @@ def _on_data_deleted(self, msg): if data_item['name'] == msg.data.label: self.state.data_items.remove(data_item) - # TODO: Fix bug with DataCollectionDeleteMessage not working with - # a handler in cubeviz/plugins/viewers.py. This code is a temporary - # workaround for that. 
- if self.config == 'cubeviz': - viewer = self.get_viewer(self._jdaviz_helper._default_spectrum_viewer_reference_name) - viewer._check_if_data_removed(msg=msg) - self._clear_object_cache(msg.data.label) def _create_data_item(self, data): @@ -2479,7 +2500,7 @@ def _create_viewer_item(self, viewer, vid=None, name=None, reference=None, 'layer_options': "IPY_MODEL_" + viewer.layer_options.model_id, 'viewer_options': "IPY_MODEL_" + viewer.viewer_options.model_id, 'selected_data_items': {}, # noqa data_id: visibility state (visible, hidden, mixed), READ-ONLY - 'visible_layers': {}, # label: {color, label_suffix}, READ-ONLY + 'visible_layers': {}, # label: {color}, READ-ONLY 'wcs_only_layers': wcs_only_layers, 'reference_data_label': reference_data_label, 'canvas_angle': 0, # canvas rotation clockwise rotation angle in deg @@ -2686,7 +2707,7 @@ def compose_viewer_area(viewer_area_items): for name in config.get('tray', []): tray = tray_registry.members.get(name) - tray_item_instance = tray.get('cls')(app=self) + tray_item_instance = tray.get('cls')(app=self, tray_instance=True) # store a copy of the tray name in the instance so it can be accessed by the # plugin itself diff --git a/jdaviz/components/viewer_data_select.vue b/jdaviz/components/viewer_data_select.vue index decb5f8162..b8fb708b58 100644 --- a/jdaviz/components/viewer_data_select.vue +++ b/jdaviz/components/viewer_data_select.vue @@ -208,12 +208,8 @@ module.exports = { } else if (this.$props.viewer.config === 'cubeviz') { if (this.$props.viewer.reference === 'spectrum-viewer') { if (item.meta.Plugin === undefined) { - // then the data can be a cube (auto-collapsed) as long as its the flux data - // if this logic moves to python, we could check directly against reference data instead - return (item.name.indexOf('[FLUX]') !== -1 || item.name.indexOf('[SCI]') !== -1) && this.dataItemInViewer(item, returnExtraItems) - } else if (item.meta.Plugin === 'GaussianSmooth') { - // spectrally smoothed would still be a collapsible cube - return item.ndims === 3 && this.dataItemInViewer(item, returnExtraItems) + // then only allow 1d spectra (not cubes or images) + return item.ndims === 1 } else { // filter plugin results to only those that are spectra return item.ndims === 1 && this.dataItemInViewer(item, returnExtraItems) diff --git a/jdaviz/components/viewer_data_select_item.vue b/jdaviz/components/viewer_data_select_item.vue index 2ad3625ad5..f9064f0f14 100644 --- a/jdaviz/components/viewer_data_select_item.vue +++ b/jdaviz/components/viewer_data_select_item.vue @@ -178,8 +178,6 @@ module.exports = { return ['IVAR', 'ERR'].indexOf(extension) !== -1 } else if (this.$props.viewer.reference === 'mask-viewer') { return ['MASK', 'DQ'].indexOf(extension) !== -1 - } else if (this.$props.viewer.reference === 'spectrum-viewer') { - return ['SCI', 'FLUX'].indexOf(extension) !== -1 } } else if (this.$props.viewer.config === 'specviz2d') { if (this.$props.viewer.reference === 'spectrum-2d-viewer') { diff --git a/jdaviz/configs/cubeviz/cubeviz.yaml b/jdaviz/configs/cubeviz/cubeviz.yaml index 4bbefeac30..f5da2d27a9 100644 --- a/jdaviz/configs/cubeviz/cubeviz.yaml +++ b/jdaviz/configs/cubeviz/cubeviz.yaml @@ -25,13 +25,13 @@ tray: - g-markers - cubeviz-slice - g-unit-conversion + - cubeviz-spectral-extraction - g-gaussian-smooth - g-collapse - g-model-fitting - g-line-list - specviz-line-analysis - cubeviz-moment-maps - - cubeviz-spectral-extraction - imviz-aper-phot-simple - export viewer_area: diff --git a/jdaviz/configs/cubeviz/helper.py 
b/jdaviz/configs/cubeviz/helper.py index 1b6df6f293..15dd1425c4 100644 --- a/jdaviz/configs/cubeviz/helper.py +++ b/jdaviz/configs/cubeviz/helper.py @@ -5,6 +5,7 @@ from specutils import Spectrum1D from specutils.io.registers import _astropy_has_priorities +from jdaviz.core.events import SnackbarMessage from jdaviz.core.helpers import ImageConfigHelper from jdaviz.configs.default.plugins.line_lists.line_list_mixin import LineListMixin from jdaviz.configs.specviz import Specviz @@ -79,6 +80,27 @@ def load_data(self, data, data_label=None, override_cube_limit=False, **kwargs): super().load_data(data, parser_reference="cubeviz-data-parser", **kwargs) + if 'Spectral Extraction' not in self.plugins: # pragma: no cover + msg = SnackbarMessage( + "Automatic spectral extraction requires the Spectral Extraction plugin to be enabled", # noqa + color='error', sender=self, timeout=10000) + self.app.hub.broadcast(msg) + else: + try: + self.plugins['Spectral Extraction']._obj._extract_in_new_instance(auto_update=False, add_data=True) # noqa + except Exception: + msg = SnackbarMessage( + "Automatic spectrum extraction for the entire cube failed." + " See the spectral extraction plugin to perform a custom extraction", + color='error', sender=self, timeout=10000) + else: + msg = SnackbarMessage( + "The extracted 1D spectrum was generated automatically for the entire cube." + " See the spectral extraction plugin for details or to" + " perform a custom extraction.", + color='warning', sender=self, timeout=10000) + self.app.hub.broadcast(msg) + @deprecated(since="3.9", alternative="select_wavelength") def select_slice(self, slice): """ @@ -120,26 +142,21 @@ def specviz(self): self._specviz = Specviz(app=self.app) return self._specviz - def get_data(self, data_label=None, spatial_subset=None, spectral_subset=None, function=None, + def get_data(self, data_label=None, spatial_subset=None, spectral_subset=None, cls=None, use_display_units=False): """ Returns data with name equal to ``data_label`` of type ``cls`` with subsets applied from - ``spatial_subset`` and/or ``spectral_subset`` using ``function`` if applicable. + ``spectral_subset``, if applicable. Parameters ---------- data_label : str, optional Provide a label to retrieve a specific data set from data_collection. spatial_subset : str, optional - Spatial subset applied to data. + Spatial subset applied to data. Only applicable if ``data_label`` points to a cube or + image. To extract a spectrum from a cube, use the spectral extraction plugin instead. spectral_subset : str, optional Spectral subset applied to data. - function : {True, False, 'minimum', 'maximum', 'mean', 'median', 'sum'}, optional - Ignored if ``data_label`` does not point to cube-like data. - If True, will collapse according to the current collapse function defined in the - spectrum viewer. If provided as a string, the cube will be collapsed with the provided - function. If False, None, or not passed, the entire cube will be returned (unless there - are values for ``spatial_subset`` and ``spectral_subset``). cls : `~specutils.Spectrum1D`, `~astropy.nddata.CCDData`, optional The type that data will be returned as. @@ -149,20 +166,8 @@ def get_data(self, data_label=None, spatial_subset=None, spectral_subset=None, f Data is returned as type cls with subsets applied. """ - # If function is a value ('sum' or 'minimum') or True and spatial and spectral - # are set, then we collapse the cube along the spatial subset using the function, then - # we apply the mask from the spectral subset. 
- # If function is any value other than False, we use specviz - if (function is not False and spectral_subset and spatial_subset) or function: - return self.specviz.get_data(data_label=data_label, spectral_subset=spectral_subset, - cls=cls, spatial_subset=spatial_subset, function=function) - elif function is False and spectral_subset: - raise ValueError("function cannot be False if spectral_subset" - " is set") - elif function is False: - function = None return self._get_data(data_label=data_label, spatial_subset=spatial_subset, - spectral_subset=spectral_subset, function=function, + spectral_subset=spectral_subset, cls=cls, use_display_units=use_display_units) # Need this method for Imviz Aperture Photometry plugin. diff --git a/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.py b/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.py index ac98d7cbcc..a567176b69 100644 --- a/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.py +++ b/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.py @@ -12,7 +12,7 @@ from jdaviz.core.events import SnackbarMessage, GlobalDisplayUnitChanged from jdaviz.core.registries import tray_registry from jdaviz.core.template_mixin import (PluginTemplateMixin, - DatasetSelectMixin, + DatasetSelect, DatasetSelectMixin, SpectralSubsetSelectMixin, AddResultsMixin, SelectPluginComponent, @@ -53,6 +53,10 @@ class MomentMap(PluginTemplateMixin, DatasetSelectMixin, SpectralSubsetSelectMix Subset to use for the continuum, or ``None`` to skip continuum subtraction, or ``Surrounding`` to use a region surrounding the subset set in ``spectral_subset``. + * ``continuum_dataset`` (:class:`~jdaviz.core.template_mixin.DatasetSelect`): + Dataset of the extracted 1D spectrum to use when visualizing the continuum. + The continuum will be redetermined based on the input cube (``dataset``) when + computing the moment map. * ``continuum_width``: Width, relative to the overall line spectral region, to fit the linear continuum (excluding the region containing the line). 
If 1, will use endpoints within line region @@ -68,6 +72,9 @@ class MomentMap(PluginTemplateMixin, DatasetSelectMixin, SpectralSubsetSelectMix template_file = __file__, "moment_maps.vue" uses_active_status = Bool(True).tag(sync=True) + continuum_dataset_items = List().tag(sync=True) + continuum_dataset_selected = Unicode().tag(sync=True) + n_moment = IntHandleEmpty(0).tag(sync=True) filename = Unicode().tag(sync=True) moment_available = Bool(False).tag(sync=True) @@ -88,6 +95,12 @@ def __init__(self, *args, **kwargs): self.moment = None + self.continuum_dataset = DatasetSelect(self, + 'continuum_dataset_items', + 'continuum_dataset_selected', + filters=['not_child_layer', + 'layer_in_spectrum_viewer']) + self.output_unit = SelectPluginComponent(self, items='output_unit_items', selected='output_unit_selected', @@ -121,7 +134,7 @@ def user_api(self): # NOTE: leaving save_as_fits out for now - we may want a more general API to do that # accross all plugins at some point return PluginUserApi(self, expose=('dataset', 'spectral_subset', - 'continuum', 'continuum_width', + 'continuum', 'continuum_dataset', 'continuum_width', 'n_moment', 'output_unit', 'reference_wavelength', 'add_results', 'calculate_moment')) @@ -190,17 +203,21 @@ def _set_data_units(self, event={}): self.send_state("output_radio_items") @observe("dataset_selected", "spectral_subset_selected", - "continuum_subset_selected", "continuum_width") + "continuum_subset_selected", "continuum_dataset_selected", "continuum_width") @skip_if_no_updates_since_last_active() def _calculate_continuum(self, msg=None): if not hasattr(self, 'dataset') or self.app._jdaviz_helper is None: # noqa # during initial init, this can trigger before the component is initialized return - + if self.continuum_dataset_selected == '': + # NOTE: we could send self.dataset.selected through + # spectral_extraction._extract_in_new_instance() to get a spectrum + # for the selected/default cube, + # but there is no visible spectrum to even show under the continuum + return # NOTE: there is no use in caching this, as the continuum will need to be re-computed # per-spaxel to use within calculating the moment map - _ = self._get_continuum(self.dataset, - None, + _ = self._get_continuum(self.continuum_dataset, self.spectral_subset, update_marks=True) @@ -235,10 +252,10 @@ def calculate_moment(self, add_data=True): else: cube = self.dataset.selected_obj else: - _, _, cube = self._get_continuum(self.dataset, - 'per-pixel', + _, _, cube = self._get_continuum(self.continuum_dataset, self.spectral_subset, - update_marks=False) + update_marks=False, + per_pixel=True) # slice out desired region # TODO: should we add a warning for a composite spectral subset? diff --git a/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.vue b/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.vue index b22a644f8c..f6fbfeec74 100644 --- a/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.vue +++ b/jdaviz/configs/cubeviz/plugins/moment_maps/moment_maps.vue @@ -53,6 +53,15 @@ hint="Select spectral region that defines the continuum." 
/> + + diff --git a/jdaviz/configs/cubeviz/plugins/moment_maps/tests/test_moment_maps.py b/jdaviz/configs/cubeviz/plugins/moment_maps/tests/test_moment_maps.py index d919cf776b..6929181a6c 100644 --- a/jdaviz/configs/cubeviz/plugins/moment_maps/tests/test_moment_maps.py +++ b/jdaviz/configs/cubeviz/plugins/moment_maps/tests/test_moment_maps.py @@ -70,7 +70,7 @@ def test_moment_calculation(cubeviz_helper, spectrum1d_cube, tmp_path): mm._obj.vue_calculate_moment() assert mm._obj.moment_available - assert dc[1].label == 'moment 0' + assert dc[-1].label == 'moment 0' mv_data = cubeviz_helper.app.get_viewer( cubeviz_helper._default_uncert_viewer_reference_name ).data() @@ -78,7 +78,7 @@ def test_moment_calculation(cubeviz_helper, spectrum1d_cube, tmp_path): assert len(mv_data) == 1 assert mv_data[0].label == 'moment 0' - assert len(dc.links) == 14 + assert len(dc.links) == 19 # label should remain unchanged but raise overwrite warnings assert mm._obj.results_label == 'moment 0' @@ -99,9 +99,9 @@ def test_moment_calculation(cubeviz_helper, spectrum1d_cube, tmp_path): cubeviz_helper._default_flux_viewer_reference_name, 'moment 0' ) - result = dc[1].get_object(cls=CCDData) + result = dc[-1].get_object(cls=CCDData) assert result.shape == (4, 2) # Cube shape is (2, 2, 4) - assert isinstance(dc[1].coords, WCS) + assert isinstance(dc[-1].coords, WCS) # Make sure coordinate display now show moment map info (no WCS) label_mouseover._viewer_mouse_event(flux_viewer, {'event': 'mousemove', @@ -123,15 +123,15 @@ def test_moment_calculation(cubeviz_helper, spectrum1d_cube, tmp_path): assert mm._obj.results_label_overwrite is False mm._obj.vue_calculate_moment() - assert dc[2].label == 'moment 1' + assert dc[-1].label == 'moment 1' - assert len(dc.links) == 22 - assert len(dc.external_links) == 4 # pixel linked + assert len(dc.links) == 27 + assert len(dc.external_links) == 6 # pixel linked # Link 3D z to 2D x and 3D y to 2D y - assert (dc.external_links[0].cids1[0].label == "Pixel Axis 0 [z]" and - dc.external_links[0].cids2[0].label == "Pixel Axis 1 [x]" and - dc.external_links[1].cids1[0].label == "Pixel Axis 1 [y]" and - dc.external_links[1].cids2[0].label == "Pixel Axis 0 [y]") + assert (dc.external_links[2].cids1[0].label == "Pixel Axis 0 [z]" and + dc.external_links[2].cids2[0].label == "Pixel Axis 1 [x]" and + dc.external_links[3].cids1[0].label == "Pixel Axis 1 [y]" and + dc.external_links[3].cids2[0].label == "Pixel Axis 0 [y]") # Coordinate display should be unaffected. 
label_mouseover._viewer_mouse_event(flux_viewer, {'event': 'mousemove', diff --git a/jdaviz/configs/cubeviz/plugins/parsers.py b/jdaviz/configs/cubeviz/plugins/parsers.py index 90c7212b66..7c7b14f2b4 100644 --- a/jdaviz/configs/cubeviz/plugins/parsers.py +++ b/jdaviz/configs/cubeviz/plugins/parsers.py @@ -57,15 +57,13 @@ def parse_data(app, file_obj, data_type=None, data_label=None, parent=None): _parse_hdulist( app, file_obj, file_name=data_label, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name, uncert_viewer_reference_name=uncert_viewer_reference_name ) app.get_tray_item_from_name("Spectral Extraction").disabled_msg = "" elif isinstance(file_obj, str): if file_obj.lower().endswith('.gif'): # pragma: no cover _parse_gif(app, file_obj, data_label, - flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name) + flux_viewer_reference_name=flux_viewer_reference_name) return file_name = os.path.basename(file_obj) @@ -101,7 +99,6 @@ def parse_data(app, file_obj, data_type=None, data_label=None, parent=None): _parse_jwst_s3d( app, hdulist, data_label, ext=ext, viewer_name=viewer_name, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name, parent=parent_data_label ) elif telescop == 'jwst' and filetype == 'r3d' and system == 'esa-pipeline': @@ -112,13 +109,11 @@ def parse_data(app, file_obj, data_type=None, data_label=None, parent=None): _parse_esa_s3d( app, hdulist, data_label, ext=ext, viewer_name=viewer_name, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name ) else: _parse_hdulist( app, hdulist, file_name=data_label or file_name, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name, uncert_viewer_reference_name=uncert_viewer_reference_name ) app.get_tray_item_from_name("Spectral Extraction").disabled_msg = "" @@ -131,7 +126,6 @@ def parse_data(app, file_obj, data_type=None, data_label=None, parent=None): _parse_spectrum1d_3d( app, file_obj, data_label=data_label, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name, uncert_viewer_reference_name=uncert_viewer_reference_name ) else: @@ -144,7 +138,6 @@ def parse_data(app, file_obj, data_type=None, data_label=None, parent=None): elif isinstance(file_obj, np.ndarray) and file_obj.ndim == 3: _parse_ndarray(app, file_obj, data_label=data_label, data_type=data_type, flux_viewer_reference_name=flux_viewer_reference_name, - spectrum_viewer_reference_name=spectrum_viewer_reference_name, uncert_viewer_reference_name=uncert_viewer_reference_name) app.get_tray_item_from_name("Spectral Extraction").disabled_msg = "" else: @@ -206,7 +199,6 @@ def _return_spectrum_with_correct_units(flux, wcs, metadata, data_type, target_w def _parse_hdulist(app, hdulist, file_name=None, flux_viewer_reference_name=None, - spectrum_viewer_reference_name=None, uncert_viewer_reference_name=None): if file_name is None and hasattr(hdulist, 'file_name'): file_name = hdulist.file_name @@ -278,14 +270,12 @@ def _parse_hdulist(app, hdulist, file_name=None, app.data_collection[data_label].get_component("flux").units = flux_unit # Add flux to top left image viewer app.add_data_to_viewer(flux_viewer_reference_name, data_label) - # Add flux to spectrum viewer - 
app.add_data_to_viewer(spectrum_viewer_reference_name, data_label) app._jdaviz_helper._loaded_flux_cube = app.data_collection[data_label] def _parse_jwst_s3d(app, hdulist, data_label, ext='SCI', viewer_name=None, flux_viewer_reference_name=None, - spectrum_viewer_reference_name=None, parent=None): + parent=None): hdu = hdulist[ext] data_type = _get_data_type_by_hdu(hdu) @@ -332,9 +322,6 @@ def _parse_jwst_s3d(app, hdulist, data_label, ext='SCI', if viewer_name is not None: app.add_data_to_viewer(viewer_name, data_label) - # Also add the collapsed flux to the spectrum viewer - if viewer_name == flux_viewer_reference_name: - app.add_data_to_viewer(spectrum_viewer_reference_name, data_label) if ext == 'DQ': app.add_data_to_viewer(flux_viewer_reference_name, data_label, visible=False) @@ -345,8 +332,7 @@ def _parse_jwst_s3d(app, hdulist, data_label, ext='SCI', app._jdaviz_helper._loaded_uncert_cube = app.data_collection[data_label] -def _parse_esa_s3d(app, hdulist, data_label, ext='DATA', flux_viewer_reference_name=None, - spectrum_viewer_reference_name=None): +def _parse_esa_s3d(app, hdulist, data_label, ext='DATA', flux_viewer_reference_name=None): hdu = hdulist[ext] data_type = _get_data_type_by_hdu(hdu) @@ -388,7 +374,6 @@ def _parse_esa_s3d(app, hdulist, data_label, ext='DATA', flux_viewer_reference_n app.data_collection[-1].get_component("flux").units = flux.unit app.add_data_to_viewer(flux_viewer_reference_name, data_label) - app.add_data_to_viewer(spectrum_viewer_reference_name, data_label) if data_type == 'flux': app._jdaviz_helper._loaded_flux_cube = app.data_collection[data_label] @@ -397,7 +382,7 @@ def _parse_esa_s3d(app, hdulist, data_label, ext='DATA', flux_viewer_reference_n def _parse_spectrum1d_3d(app, file_obj, data_label=None, - flux_viewer_reference_name=None, spectrum_viewer_reference_name=None, + flux_viewer_reference_name=None, uncert_viewer_reference_name=None): """Load spectrum1d as a cube.""" @@ -441,7 +426,6 @@ def _parse_spectrum1d_3d(app, file_obj, data_label=None, if attr == 'flux': app.add_data_to_viewer(flux_viewer_reference_name, cur_data_label) - app.add_data_to_viewer(spectrum_viewer_reference_name, cur_data_label) app._jdaviz_helper._loaded_flux_cube = app.data_collection[cur_data_label] elif attr == 'uncertainty': app.add_data_to_viewer(uncert_viewer_reference_name, cur_data_label) @@ -468,7 +452,7 @@ def _parse_spectrum1d(app, file_obj, data_label=None, spectrum_viewer_reference_ def _parse_ndarray(app, file_obj, data_label=None, data_type=None, - flux_viewer_reference_name=None, spectrum_viewer_reference_name=None, + flux_viewer_reference_name=None, uncert_viewer_reference_name=None): if data_label is None: data_label = app.return_data_label(file_obj) @@ -489,15 +473,13 @@ def _parse_ndarray(app, file_obj, data_label=None, data_type=None, if data_type == 'flux': app.add_data_to_viewer(flux_viewer_reference_name, data_label) - app.add_data_to_viewer(spectrum_viewer_reference_name, data_label) app._jdaviz_helper._loaded_flux_cube = app.data_collection[data_label] elif data_type == 'uncert': app.add_data_to_viewer(uncert_viewer_reference_name, data_label) app._jdaviz_helper._loaded_uncert_cube = app.data_collection[data_label] -def _parse_gif(app, file_obj, data_label=None, flux_viewer_reference_name=None, - spectrum_viewer_reference_name=None): # pragma: no cover +def _parse_gif(app, file_obj, data_label=None, flux_viewer_reference_name=None): # pragma: no cover # NOTE: Parsing GIF needs imageio and Pillow, both are which undeclared # in setup.cfg but 
might or might not be installed by declared ones. import imageio @@ -515,7 +497,6 @@ def _parse_gif(app, file_obj, data_label=None, flux_viewer_reference_name=None, app.add_data(s3d, data_label) app.add_data_to_viewer(flux_viewer_reference_name, data_label) - app.add_data_to_viewer(spectrum_viewer_reference_name, data_label) def _get_data_type_by_hdu(hdu): diff --git a/jdaviz/configs/cubeviz/plugins/slice/slice.py b/jdaviz/configs/cubeviz/plugins/slice/slice.py index 8cc5c5bd02..03a7945e81 100644 --- a/jdaviz/configs/cubeviz/plugins/slice/slice.py +++ b/jdaviz/configs/cubeviz/plugins/slice/slice.py @@ -68,6 +68,9 @@ class Slice(PluginTemplateMixin): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + self._cached_properties = ['valid_selection_values', 'valid_selection_values_sorted', + 'valid_indicator_values', 'valid_indicator_values_sorted', + 'valid_values', 'valid_values_sorted'] self._indicator_initialized = False self._player = None @@ -84,7 +87,7 @@ def __init__(self, *args, **kwargs): self.hub.subscribe(self, AddDataMessage, handler=self._on_add_data) self.hub.subscribe(self, RemoveDataMessage, - handler=self._on_valid_selection_values_changed) + handler=lambda _: self._clear_cache()) # connect any pre-existing viewers for viewer in self.app._viewer_store.values(): @@ -105,9 +108,8 @@ def _initialize_location(self, *args): # will be handled by a change to global display units) if not self.value_unit: for viewer in self.slice_indicator_viewers: - # ignore while x_display_unit is unset or still degrees (before data transpose) - # if we ever need the slice axis to be degrees, this will need to be loosened - if getattr(viewer.state, 'x_display_unit', None) not in (None, 'deg'): + # ignore while x_display_unit is unset + if getattr(viewer.state, 'x_display_unit', None) is not None: self.value_unit = viewer.state.x_display_unit break @@ -119,6 +121,7 @@ def _initialize_location(self, *args): # and just use the first layer with data. 
Once initialized, this logic will be # skipped going forward to not change any user selection (so will default to the # middle of the first found layer) + self._clear_cache() for viewer in self.slice_indicator_viewers: if str(viewer.state.x_att) not in self.valid_slice_att_names: # avoid setting value to degs, before x_att is changed to wavelength, for example @@ -136,7 +139,7 @@ def slice_display_unit_name(self): @property def valid_slice_att_names(self): - return _spectral_axis_names + ['Pixel Axis 2 [x]'] + return _spectral_axis_names + ['Pixel Axis 2 [x]', 'World 0'] @property def slice_selection_viewers(self): @@ -220,11 +223,11 @@ def _on_viewer_removed(self, msg): self._check_if_cube_viewer_exists() def _on_add_data(self, msg): + self._clear_cache() self._initialize_location() if isinstance(msg.viewer, WithSliceSelection): # instead of just setting viewer.slice_value, we'll make sure the "snapping" logic # is updated (if enabled) - self._on_valid_selection_values_changed() self._on_value_updated({'new': self.value}) def _on_select_slice_message(self, msg): @@ -240,12 +243,15 @@ def _on_global_display_unit_changed(self, msg): return prev_unit = u.Unit(self.value_unit) self.value_unit = str(msg.unit) - self._on_valid_selection_values_changed() + self._clear_cache() self.value = (self.value * prev_unit).to_value(msg.unit, equivalencies=u.spectral()) - def _on_valid_selection_values_changed(self, msg=None): - if 'valid_selection_values' in self.__dict__: - del self.__dict__['valid_selection_values'] + def _clear_cache(self, *attrs): + if not len(attrs): + attrs = self._cached_properties + for attr in attrs: + if attr in self.__dict__: + del self.__dict__[attr] @cached_property def valid_selection_values(self): @@ -255,12 +261,12 @@ def valid_selection_values(self): return np.array([]) return np.unique(np.concatenate([viewer.slice_values for viewer in viewers])) - @property + @cached_property def valid_selection_values_sorted(self): # all available slice values from cubes (sorted) return np.sort(self.valid_selection_values) - @property + @cached_property def valid_indicator_values(self): # all x-values in indicator viewers (unsorted) viewers = self.slice_indicator_viewers @@ -268,15 +274,15 @@ def valid_indicator_values(self): return np.array([]) return np.unique(np.concatenate([viewer.slice_values for viewer in viewers])) - @property + @cached_property def valid_indicator_values_sorted(self): return np.sort(self.valid_indicator_values) - @property + @cached_property def valid_values(self): return self.valid_selection_values if self.cube_viewer_exists else self.valid_indicator_values # noqa - @property + @cached_property def valid_values_sorted(self): return self.valid_selection_values_sorted if self.cube_viewer_exists else self.valid_indicator_values_sorted # noqa diff --git a/jdaviz/configs/cubeviz/plugins/slice/tests/test_slice.py b/jdaviz/configs/cubeviz/plugins/slice/tests/test_slice.py index 466ab1c297..06d647bc68 100644 --- a/jdaviz/configs/cubeviz/plugins/slice/tests/test_slice.py +++ b/jdaviz/configs/cubeviz/plugins/slice/tests/test_slice.py @@ -21,11 +21,13 @@ def test_slice(cubeviz_helper, spectrum1d_cube): assert not sl.is_playing cubeviz_helper.load_data(spectrum1d_cube, data_label='test') - app.add_data_to_viewer("spectrum-viewer", "test[FLUX]") app.add_data_to_viewer("flux-viewer", "test[FLUX]") app.add_data_to_viewer("uncert-viewer", "test[FLUX]") + app.add_data_to_viewer("spectrum-viewer", "Spectrum (sum)") + sv = cubeviz_helper.viewers['spectrum-viewer']._obj # sample 
cube only has 2 slices with wavelengths [4.62280007e-07 4.62360028e-07] m + assert len(sv.slice_values) == 2 assert len(sl.valid_indicator_values_sorted) == 2 slice_values = sl.valid_selection_values_sorted assert len(slice_values) == 2 @@ -92,11 +94,11 @@ def test_slice(cubeviz_helper, spectrum1d_cube): def test_indicator_settings(cubeviz_helper, spectrum1d_cube): + cubeviz_helper.load_data(spectrum1d_cube, data_label='test') app = cubeviz_helper.app - app.add_data(spectrum1d_cube, 'test') - app.add_data_to_viewer("spectrum-viewer", "test") - app.add_data_to_viewer("flux-viewer", "test") - sl = Slice(app=app) + app.add_data_to_viewer("flux-viewer", "test[FLUX]") + app.add_data_to_viewer("spectrum-viewer", "Spectrum (sum)") + sl = cubeviz_helper.plugins['Slice']._obj sv = app.get_viewer('spectrum-viewer') indicator = sv.slice_indicator diff --git a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py index d5d558fcbc..528b172490 100644 --- a/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py +++ b/jdaviz/configs/cubeviz/plugins/spectral_extraction/spectral_extraction.py @@ -3,14 +3,11 @@ import numpy as np import astropy -from astropy.utils.decorators import deprecated from astropy.nddata import ( NDDataArray, StdDevUncertainty ) from traitlets import Any, Bool, Dict, Float, List, Unicode, observe -from packaging.version import Version from photutils.aperture import CircularAperture, EllipticalAperture, RectangularAperture -from specutils import Spectrum1D from jdaviz.core.custom_traitlets import FloatHandleEmpty from jdaviz.core.events import SnackbarMessage, SliceValueUpdatedMessage @@ -23,6 +20,7 @@ ApertureSubsetSelect, AddResultsMixin, skip_if_no_updates_since_last_active, + skip_if_not_tray_instance, with_spinner, with_temp_disable) from jdaviz.core.user_api import PluginUserApi from jdaviz.core.region_translators import regions2aperture @@ -32,8 +30,6 @@ __all__ = ['SpectralExtraction'] -ASTROPY_LT_5_3_2 = Version(astropy.__version__) < Version('5.3.2') - @tray_registry( 'cubeviz-spectral-extraction', label="Spectral Extraction", viewer_requirements='spectrum' @@ -146,9 +142,6 @@ def __init__(self, *args, **kwargs): self.session.hub.subscribe(self, SliceValueUpdatedMessage, handler=self._on_slice_changed) - if ASTROPY_LT_5_3_2: - self.disabled_msg = "Spectral Extraction in Cubeviz requires astropy>=5.3.2" - if self.app.state.settings.get('server_is_remote', False): # when the server is remote, saving the file in python would save on the server, not # on the user's machine, so export support in cubeviz should be disabled @@ -167,14 +160,14 @@ def __init__(self, *args, **kwargs): @property def user_api(self): - expose = ['dataset', 'function', 'spatial_subset', 'aperture', + expose = ['dataset', 'function', 'aperture', 'add_results', 'collapse_to_spectrum', 'wavelength_dependent', 'reference_spectral_value', 'aperture_method'] if self.dev_bg_support: expose += ['background', 'bg_wavelength_dependent'] - return PluginUserApi(self, expose=expose, excl_from_dict=['spatial_subset']) + return PluginUserApi(self, expose=expose) @property def live_update_subscriptions(self): @@ -187,11 +180,6 @@ def __call__(self, add_data=True): def slice_display_unit_name(self): return 'spectral' - @property - @deprecated(since="3.9", alternative="aperture") - def spatial_subset(self): - return self.user_api.aperture - @observe('active_step') def _active_step_changed(self, *args): 
self.aperture._set_mark_visiblities(self.active_step in ('', 'ap', 'ext')) @@ -201,6 +189,47 @@ def _active_step_changed(self, *args): def slice_plugin(self): return self.app._jdaviz_helper.plugins['Slice'] + @observe('aperture_items') + @skip_if_not_tray_instance() + def _aperture_items_changed(self, msg): + if not hasattr(self, 'aperture'): + return + for item in msg['new']: + if item not in msg['old']: + if item.get('type') != 'spatial': + continue + subset_lbl = item.get('label') + try: + self._extract_in_new_instance(subset_lbl=subset_lbl, + auto_update=True, add_data=True) + except Exception: + msg = SnackbarMessage( + f"Automatic spectrum extraction for {subset_lbl} failed", + color='error', sender=self, timeout=10000) + else: + msg = SnackbarMessage( + f"Automatic spectrum extraction for {subset_lbl} successful", + color='success', sender=self) + self.app.hub.broadcast(msg) + + def _extract_in_new_instance(self, dataset=None, function='Sum', subset_lbl=None, + auto_update=False, add_data=False): + # create a new instance of the Spectral Extraction plugin (to not affect the instance in + # the tray) and extract the entire cube with defaults. + if dataset is None: + if self._app._jdaviz_helper._loaded_flux_cube is None: + return + dataset = self._app._jdaviz_helper._loaded_flux_cube.label + plg = self.new() + plg.dataset.selected = dataset + if subset_lbl is not None: + plg.aperture.selected = subset_lbl + plg.aperture_method.selected = 'Center' + plg.function.selected = function + plg.add_results.auto_update_result = auto_update + # all other settings remain at their plugin defaults + return plg(add_data=add_data) + @observe('wavelength_dependent', 'bg_wavelength_dependent') def _wavelength_dependent_changed(self, *args): if self.wavelength_dependent: @@ -257,8 +286,12 @@ def collapse_to_spectrum(self, add_data=True, **kwargs): if self.conflicting_aperture_and_function: raise ValueError(self.conflicting_aperture_error_message) - spectral_cube = self._app._jdaviz_helper._loaded_flux_cube - uncert_cube = self._app._jdaviz_helper._loaded_uncert_cube + spectral_cube = self.dataset.selected_dc_item + if self.dataset.selected == self._app._jdaviz_helper._loaded_flux_cube.label: + uncert_cube = self._app._jdaviz_helper._loaded_uncert_cube + else: + # TODO: allow selecting or associating an uncertainty cube? 
+ uncert_cube = None uncertainties = None selected_func = self.function_selected.lower() @@ -298,11 +331,14 @@ def collapse_to_spectrum(self, add_data=True, **kwargs): uncertainties = uncert_cube.get_object(cls=StdDevUncertainty) flux = nddata.data << nddata.unit mask = nddata.mask + shape_mask = np.ones_like(nddata.data) # Use the spectral coordinate from the WCS: if '_orig_spec' in spectral_cube.meta: wcs = spectral_cube.meta['_orig_spec'].wcs.spectral - else: + elif hasattr(spectral_cube.coords, 'spectral'): wcs = spectral_cube.coords.spectral + else: + wcs = None # Filter out NaNs (False = good) mask = np.logical_or(mask, np.isnan(flux)) @@ -344,6 +380,9 @@ def collapse_to_spectrum(self, add_data=True, **kwargs): else: target_wave_unit = spectral_cube.coords.spectral.world_axis_units[0] + if target_wave_unit == '': + target_wave_unit = 'pix' + flux = collapsed_nddata.data << collapsed_nddata.unit mask = collapsed_nddata.mask uncertainty = collapsed_nddata.uncertainty @@ -365,6 +404,8 @@ def collapse_to_spectrum(self, add_data=True, **kwargs): collapsed_spec.meta['_pixel_scale_factor'] = pix_scale_factor if add_data: + if default_color := self.aperture.selected_item.get('color', None): + collapsed_spec.meta['_default_color'] = default_color self.add_results.add_results_from_plugin(collapsed_spec) snackbar_message = SnackbarMessage( @@ -377,8 +418,7 @@ def collapse_to_spectrum(self, add_data=True, **kwargs): def get_aperture(self): # Retrieve flux cube and create an array to represent the cone mask - flux_cube = self._app._jdaviz_helper._loaded_flux_cube.get_object(cls=Spectrum1D, - statistic=None) + flux_cube = self.dataset.selected_obj display_unit = astropy.units.Unit(self.app._get_display_unit(self.slice_display_unit_name)) # if subset is a composite subset, skip the other logic: @@ -494,19 +534,19 @@ def _save_extracted_spec_to_fits(self, overwrite=False, *args): f"Extracted spectrum saved to {os.path.abspath(filename)}", sender=self, color="success")) - @observe('aperture_selected') + @observe('aperture_selected', 'function_selected') def _set_default_results_label(self, event={}): - label = "Spectral extraction" - - if ( - hasattr(self, 'aperture') and - self.aperture.selected != self.aperture.default_text - ): - label += f' ({self.aperture_selected})' - self.results_label_default = label + if not hasattr(self, 'aperture'): + return + if self.aperture.selected == self.aperture.default_text: + self.results_label_default = f"Spectrum ({self.function_selected.lower()})" + else: + self.results_label_default = f"Spectrum ({self.aperture_selected}, {self.function_selected.lower()})" # noqa @property def marks(self): + if not self._tray_instance: + return {} marks = {} for id, viewer in self.app._viewer_store.items(): if not isinstance(viewer, CubevizProfileView): @@ -544,6 +584,8 @@ def _toggle_marks(self, event={}): @skip_if_no_updates_since_last_active() @with_temp_disable(timeout=0.3) def _live_update(self, event={}): + if not self._tray_instance: + return if not self.show_live_preview or not self.is_active: self._clear_marks() return diff --git a/jdaviz/configs/cubeviz/plugins/tests/test_cubeviz_helper.py b/jdaviz/configs/cubeviz/plugins/tests/test_cubeviz_helper.py index 37a359801f..09e01d71c9 100644 --- a/jdaviz/configs/cubeviz/plugins/tests/test_cubeviz_helper.py +++ b/jdaviz/configs/cubeviz/plugins/tests/test_cubeviz_helper.py @@ -1,9 +1,3 @@ -import pytest - -from astropy import units as u -from astropy.tests.helper import assert_quantity_allclose -from specutils import 
Spectrum1D - from glue.core.roi import XRangeROI from jdaviz import Cubeviz @@ -27,9 +21,6 @@ def test_plugin_user_apis(cubeviz_helper): for plugin_name, plugin_api in cubeviz_helper.plugins.items(): plugin = plugin_api._obj for attr in plugin_api._expose: - if plugin_name == 'Spectral Extraction' and attr == 'spatial_subset': - # deprecated, so would raise a deprecation warning - continue if plugin_name == 'Slice' and attr in ('slice', 'wavelength', 'wavelength_unit', 'show_wavelength'): # deprecated, so would raise a deprecation warning @@ -37,49 +28,6 @@ def test_plugin_user_apis(cubeviz_helper): assert hasattr(plugin, attr) -def test_invalid_function(cubeviz_helper, spectrum1d_cube): - cubeviz_helper.load_data(spectrum1d_cube, "test") - cubeviz_helper._apply_interactive_region('bqplot:ellipse', (0, 0), (9, 8)) - - with pytest.raises(ValueError, match='function 42 not in list of valid '): - cubeviz_helper.get_data(data_label="test[FLUX]", spatial_subset='Subset 1', function=42) - - # Also make sure specviz redshift slider warning does not show up. - # https://github.com/spacetelescope/jdaviz/issues/2029 - cubeviz_helper.specviz.y_limits(0, 3) - - -def test_valid_function(cubeviz_helper, spectrum1d_cube): - cubeviz_helper.load_data(spectrum1d_cube, "test") - cubeviz_helper._apply_interactive_region('bqplot:ellipse', (0, 0), (9, 8)) - - results_cube = cubeviz_helper.get_data(data_label="test[FLUX]", - spatial_subset='Subset 1') - assert results_cube.flux.ndim == 3 - results_false = cubeviz_helper.get_data(data_label="test[FLUX]", - spatial_subset='Subset 1', function=False) - assert results_false.flux.ndim == 3 - - results_def = cubeviz_helper.get_data(data_label="test[FLUX]", - spatial_subset='Subset 1', function=True) - assert results_def.flux.ndim == 1 - - results_min = cubeviz_helper.get_data(data_label="test[FLUX]", - spatial_subset='Subset 1', function="minimum") - results_max = cubeviz_helper.get_data(data_label="test[FLUX]", - spatial_subset='Subset 1', function="maximum") - assert isinstance(results_min, Spectrum1D) - assert_quantity_allclose(results_min.flux, - [6., 14.] * u.Jy, atol=1e-5 * u.Jy) - assert_quantity_allclose(results_max.flux, - [7., 15.] 
* u.Jy, atol=1e-5 * u.Jy) - - # calling without function gives cube - assert cubeviz_helper.get_data(data_label="test[FLUX]").flux.ndim == 3 - # but calling through specviz automatically collapses - assert cubeviz_helper.specviz.get_data(data_label="test[FLUX]").flux.ndim == 1 - - def test_remote_server_disable_save_serverside(): config = get_configuration('cubeviz') config['settings']['server_is_remote'] = True @@ -94,47 +42,18 @@ def test_remote_server_disable_save_serverside(): def test_get_data_spatial_and_spectral(cubeviz_helper, spectrum1d_cube_larger): - data_label = "test" - spatial_subset = "Subset 1" - spectral_subset = "Subset 2" - cubeviz_helper.load_data(spectrum1d_cube_larger, data_label) + cubeviz_helper.load_data(spectrum1d_cube_larger, data_label="test") + # Subset 1 (spatial) cubeviz_helper._apply_interactive_region('bqplot:ellipse', (0, 0), (9, 8)) + # Subset 2 (spectral) spec_viewer = cubeviz_helper.app.get_viewer(cubeviz_helper._default_spectrum_viewer_reference_name) # noqa spec_viewer.apply_roi(XRangeROI(4.62440061e-07, 4.62520112e-07)) - data_label = data_label + "[FLUX]" - # This will be the same if function is None or True - spatial_with_spec = cubeviz_helper.get_data(data_label=data_label, - spatial_subset=spatial_subset, - spectral_subset=spectral_subset) + spatial_with_spec = cubeviz_helper.get_data(data_label="Spectrum (Subset 1, sum)", + spectral_subset="Subset 2") assert spatial_with_spec.flux.ndim == 1 assert list(spatial_with_spec.mask) == [True, True, False, False, True, True, True, True, True, True] assert max(list(spatial_with_spec.flux.value)) == 157. assert min(list(spatial_with_spec.flux.value)) == 13. - - spatial_with_spec = cubeviz_helper.get_data(data_label=data_label, - spatial_subset=spatial_subset, - spectral_subset=spectral_subset, - function='minimum') - assert max(list(spatial_with_spec.flux.value)) == 78. - assert min(list(spatial_with_spec.flux.value)) == 6. - - collapse_with_spectral = cubeviz_helper.get_data(data_label=data_label, - spectral_subset=spectral_subset, - function=True) - collapse_with_spectral2 = cubeviz_helper.get_data(data_label=data_label, - function=True) - - assert list(collapse_with_spectral.flux) == list(collapse_with_spectral2.flux) - - with pytest.raises(ValueError, match=f'{spectral_subset} is not a spatial subset.'): - cubeviz_helper.get_data(data_label=data_label, spatial_subset=spectral_subset, - function=True) - with pytest.raises(ValueError, match=f'{spatial_subset} is not a spectral subset.'): - cubeviz_helper.get_data(data_label=data_label, spectral_subset=spatial_subset, - function=True) - with pytest.raises(ValueError, match='function cannot be False if spectral_subset'): - cubeviz_helper.get_data(data_label=data_label, spectral_subset=spectral_subset, - function=False) diff --git a/jdaviz/configs/cubeviz/plugins/tests/test_data_retrieval.py b/jdaviz/configs/cubeviz/plugins/tests/test_data_retrieval.py index b5f1dfc6f5..ee1acdef34 100644 --- a/jdaviz/configs/cubeviz/plugins/tests/test_data_retrieval.py +++ b/jdaviz/configs/cubeviz/plugins/tests/test_data_retrieval.py @@ -28,7 +28,7 @@ def test_data_retrieval(cubeviz_helper): # two ways of retrieving data from the viewer. 
# They should return the same spectral values a1 = cubeviz_helper.app.get_viewer(spectrum_viewer_reference_name).data() - a2 = cubeviz_helper.get_data("contents[FLUX]", function="sum") + a2 = cubeviz_helper.get_data("Spectrum (sum)") test_value_1 = a1[0].data test_value_2 = a2.flux.value diff --git a/jdaviz/configs/cubeviz/plugins/tests/test_parsers.py b/jdaviz/configs/cubeviz/plugins/tests/test_parsers.py index 8a42cc422a..9c5206a811 100644 --- a/jdaviz/configs/cubeviz/plugins/tests/test_parsers.py +++ b/jdaviz/configs/cubeviz/plugins/tests/test_parsers.py @@ -18,7 +18,7 @@ def test_fits_image_hdu_parse(image_cube_hdu_obj, cubeviz_helper): cubeviz_helper.load_data(image_cube_hdu_obj) - assert len(cubeviz_helper.app.data_collection) == 3 + assert len(cubeviz_helper.app.data_collection) == 4 # 3 cubes and extracted spectrum assert cubeviz_helper.app.data_collection[0].label == "Unknown HDU object[FLUX]" # first load should be successful; subsequent attempts should fail @@ -31,7 +31,7 @@ def test_fits_image_hdu_with_microns(image_cube_hdu_obj_microns, cubeviz_helper) # Passing in data_label keyword as posarg. cubeviz_helper.load_data(image_cube_hdu_obj_microns, 'has_microns') - assert len(cubeviz_helper.app.data_collection) == 3 + assert len(cubeviz_helper.app.data_collection) == 4 # 3 cubes and extracted spectrum assert cubeviz_helper.app.data_collection[0].label == 'has_microns[FLUX]' flux_cube = cubeviz_helper.app.data_collection[0].get_object(Spectrum1D, statistic=None) @@ -90,7 +90,7 @@ def test_fits_image_hdu_parse_from_file(tmpdir, image_cube_hdu_obj, cubeviz_help image_cube_hdu_obj.writeto(path, overwrite=True) cubeviz_helper.load_data(path) - assert len(cubeviz_helper.app.data_collection) == 3 + assert len(cubeviz_helper.app.data_collection) == 4 # 3 cubes and auto-extracted spectrum assert cubeviz_helper.app.data_collection[0].label == "test_fits_image.fits[FLUX]" # This tests the same data as test_fits_image_hdu_parse above. @@ -129,7 +129,7 @@ def test_spectrum3d_parse(image_cube_hdu_obj, cubeviz_helper): cubeviz_helper.load_data(sc) data = cubeviz_helper.app.data_collection[0] - assert len(cubeviz_helper.app.data_collection) == 1 + assert len(cubeviz_helper.app.data_collection) == 2 assert data.label == "Unknown spectrum object[FLUX]" assert data.shape == flux.shape @@ -187,13 +187,13 @@ def test_numpy_cube(cubeviz_helper): cubeviz_helper.load_data(arr, data_type='foo') cubeviz_helper.load_data(arr) - cubeviz_helper.load_data(arr << u.nJy, data_label='uncert_array', data_type='uncert', + cubeviz_helper.load_data(arr, data_label='uncert_array', data_type='uncert', override_cube_limit=True) with pytest.raises(RuntimeError, match='Only one cube'): cubeviz_helper.load_data(arr, data_type='mask') - assert len(cubeviz_helper.app.data_collection) == 2 + assert len(cubeviz_helper.app.data_collection) == 3 # flux cube, uncert cube, Spectrum (sum) # Check context of first cube. 
data = cubeviz_helper.app.data_collection[0] @@ -209,7 +209,7 @@ def test_numpy_cube(cubeviz_helper): assert data.label == 'uncert_array' assert data.shape == (4, 3, 2) # x, y, z assert isinstance(data.coords, PaddedSpectrumWCS) - assert flux.units == 'nJy' + assert flux.units == 'ct' def test_invalid_data_types(cubeviz_helper): diff --git a/jdaviz/configs/cubeviz/plugins/tests/test_regions.py b/jdaviz/configs/cubeviz/plugins/tests/test_regions.py index 7949347d25..751f738327 100644 --- a/jdaviz/configs/cubeviz/plugins/tests/test_regions.py +++ b/jdaviz/configs/cubeviz/plugins/tests/test_regions.py @@ -69,8 +69,9 @@ def test_spatial_spectral_mix(self): # https://github.com/spacetelescope/jdaviz/issues/1584 with pytest.warns(UserWarning, match='Applying the value from the redshift slider'): spectral_subsets = self.cubeviz.specviz.get_spectra() - assert list(spectral_subsets.keys()) == ['has_microns[FLUX]', - 'has_microns[FLUX] (Subset 1)', - 'has_microns[FLUX] (Subset 2)'], spectral_subsets # noqa + assert list(spectral_subsets.keys()) == ['Spectrum (sum)', + 'Spectrum (Subset 1, sum)', + 'Spectrum (sum) (Subset 2)', + 'Spectrum (Subset 1, sum) (Subset 2)'] for sp in spectral_subsets.values(): assert isinstance(sp, Spectrum1D) diff --git a/jdaviz/configs/cubeviz/plugins/tests/test_tools.py b/jdaviz/configs/cubeviz/plugins/tests/test_tools.py index 6c9967ec00..7c1129ff59 100644 --- a/jdaviz/configs/cubeviz/plugins/tests/test_tools.py +++ b/jdaviz/configs/cubeviz/plugins/tests/test_tools.py @@ -1,5 +1,5 @@ import pytest -from astropy.nddata import CCDData +import numpy as np from echo import delay_callback from regions import RectanglePixelRegion @@ -107,8 +107,8 @@ def test_spectrum_at_spaxel_altkey_true(cubeviz_helper, spectrum1d_cube): assert isinstance(reg, RectanglePixelRegion) # Using altKey should create a new subset - x = 2 - y = 2 + x = 0 + y = 0 flux_viewer.toolbar.active_tool.on_mouse_event( {'event': 'click', 'domain': {'x': x, 'y': y}, 'altKey': True}) assert len(flux_viewer.native_marks) == 4 @@ -141,28 +141,26 @@ def test_spectrum_at_spaxel_altkey_true(cubeviz_helper, spectrum1d_cube): def test_spectrum_at_spaxel_with_2d(cubeviz_helper): - # Use 2D image, which should not work with the tool - x = CCDData([[1, 2, 3], [4, 5, 6]], unit='adu') + # Use cube with single slice + x = np.array([[[1, 2, 3], [4, 5, 6]]]) - app = cubeviz_helper.app - app.data_collection["test"] = x - app.add_data_to_viewer("flux-viewer", "test") + cubeviz_helper.load_data(x, data_label='test') flux_viewer = cubeviz_helper.app.get_viewer("flux-viewer") spectrum_viewer = cubeviz_helper.app.get_viewer("spectrum-viewer") # Set the active tool to spectrumperspaxel flux_viewer.toolbar.active_tool = flux_viewer.toolbar.tools['jdaviz:spectrumperspaxel'] - x = 1 - y = 1 + x = 0 + y = 0 assert len(flux_viewer.native_marks) == 2 - assert len(spectrum_viewer.data()) == 0 + assert len(spectrum_viewer.data()) == 1 # Spectrum (sum) # Click on spaxel location flux_viewer.toolbar.active_tool.on_mouse_event( {'event': 'click', 'domain': {'x': x, 'y': y}, 'altKey': False}) assert len(flux_viewer.native_marks) == 3 - assert len(spectrum_viewer.data()) == 0 + assert len(spectrum_viewer.data()) == 2 # Spectrum (sum), Spectrum (Subset 1, sum) # Deselect tool flux_viewer.toolbar.active_tool = None diff --git a/jdaviz/configs/cubeviz/plugins/viewers.py b/jdaviz/configs/cubeviz/plugins/viewers.py index 8d1316af80..24ee29b41c 100644 --- a/jdaviz/configs/cubeviz/plugins/viewers.py +++ b/jdaviz/configs/cubeviz/plugins/viewers.py @@ 
-2,18 +2,13 @@ import astropy.units as u from functools import cached_property from glue.core import BaseData - -from glue.core.subset_group import GroupedSubset -from bqplot import Lines from glue_jupyter.bqplot.image import BqplotImageView from jdaviz.core.registries import viewer_registry -from jdaviz.core.marks import SliceIndicatorMarks, ShadowSpatialSpectral +from jdaviz.core.marks import SliceIndicatorMarks from jdaviz.configs.default.plugins.viewers import JdavizViewerMixin from jdaviz.configs.specviz.plugins.viewers import SpecvizProfileView -from jdaviz.core.events import AddDataMessage, RemoveDataMessage, GlobalDisplayUnitChanged from jdaviz.core.freezable_state import FreezableBqplotImageViewerState -from jdaviz.utils import get_subset_type __all__ = ['CubevizImageView', 'CubevizProfileView', 'WithSliceIndicator', 'WithSliceSelection'] @@ -35,25 +30,25 @@ def slice_indicator(self): self.figure.marks = self.figure.marks + slice_indicator.marks return slice_indicator - @cached_property + @property def slice_values(self): + # NOTE: these are cached at the slice-plugin level + # Retrieve display units + slice_display_units = self.jdaviz_app._get_display_unit( + self.slice_display_unit_name + ) def _get_component(layer): - # Retrieve display units - slice_display_units = self.jdaviz_app._get_display_unit( - self.slice_display_unit_name - ) - try: # Retrieve layer data and units - data_obj = layer.layer.data.get_component(self.slice_component_label).data - data_units = layer.layer.data.get_component(self.slice_component_label).units + data_comp = layer.layer.data.get_component(self.slice_component_label) except (AttributeError, KeyError): # layer either does not have get_component (because its a subset) # or slice_component_label is not a component in this layer # either way, return an empty array and skip this layer return np.array([]) - + data_obj = data_comp.data + data_units = data_comp.units data_spec_axis = np.asarray(data_obj.data, dtype=float) * u.Unit(data_units) # Convert axis if display units are set and are different @@ -91,8 +86,9 @@ def slice_component_label(self): def slice_display_unit_name(self): return 'spectral' - @cached_property + @property def slice_values(self): + # NOTE: these are cached at the slice-plugin level # TODO: add support for multiple cubes (but then slice selection needs to be more complex) # if slice_index is 0, then we want the equivalent of [:, 0, 0] # if slice_index is 1, then we want the equivalent of [0, :, 0] @@ -126,7 +122,7 @@ def slice_values(self): if slice_display_units and slice_display_units != data_units: converted_axis = (data_spec_axis * u.Unit(data_units)).to_value( slice_display_units, - equivalencies=u.spectral() + equivalencies=u.spectral() + u.pixel_scale(1*u.pix) ) else: converted_axis = data_spec_axis @@ -189,13 +185,6 @@ def __init__(self, *args, **kwargs): # Hide axes by default self.state.show_axes = False - self.hub.subscribe(self, GlobalDisplayUnitChanged, - handler=self._on_global_display_unit_changed - ) - - self.hub.subscribe(self, AddDataMessage, handler=self._on_global_display_unit_changed) - self.hub.subscribe(self, RemoveDataMessage, handler=self._on_global_display_unit_changed) - @property def _default_spectrum_viewer_reference_name(self): return self.jdaviz_helper._default_spectrum_viewer_reference_name @@ -208,11 +197,6 @@ def _default_flux_viewer_reference_name(self): def _default_uncert_viewer_reference_name(self): return self.jdaviz_helper._default_uncert_viewer_reference_name - def 
_on_global_display_unit_changed(self, msg): - # Clear cache of slice values when units change - if 'slice_values' in self.__dict__: - del self.__dict__['slice_values'] - def _initial_x_axis(self, *args): # Make sure that the x_att is correct on data load ref_data = self.state.reference_data @@ -259,171 +243,6 @@ def __init__(self, *args, **kwargs): kwargs.setdefault('default_tool_priority', ['jdaviz:selectslice']) super().__init__(*args, **kwargs) - self.hub.subscribe(self, RemoveDataMessage, - handler=self._check_if_data_removed) - - # TODO: Find out why this is not working - # self.hub.subscribe(self, DataCollectionDeleteMessage, - # handler=self._check_if_data_removed) - - self.hub.subscribe(self, AddDataMessage, - handler=self._check_if_data_added) - - self.hub.subscribe(self, GlobalDisplayUnitChanged, - handler=self._on_global_display_unit_changed) - @property def _default_flux_viewer_reference_name(self): return self.jdaviz_helper._default_flux_viewer_reference_name - - def _on_global_display_unit_changed(self, msg=None): - # Clear cache of slice values when units change - if 'slice_values' in self.__dict__: - del self.__dict__['slice_values'] - - def _check_if_data_removed(self, msg): - # isinstance and the data uuid check will be true for the data - # that is being removed - self.figure.marks = [m for m in self.figure.marks - if not (isinstance(m, ShadowSpatialSpectral) - and m.data_uuid == msg.data.uuid)] - self._on_global_display_unit_changed() - - def _check_if_data_added(self, msg=None): - # When data is added, make sure that all spatial subset layers - # that correspond with that data are checked for intersections - # with spectral subset layers - for layer in self.state.layers: - if layer.layer.data.label == msg.data.label: - if (isinstance(layer.layer, GroupedSubset) and - get_subset_type(layer.layer.subset_state) == 'spatial'): - self._expected_subset_layer_default(layer) - self._on_global_display_unit_changed() - - def _is_spatial_subset(self, layer): - subset_state = getattr(layer.layer, 'subset_state', None) - return get_subset_type(subset_state) == 'spatial' - - def _get_spatial_subset_layers(self, data_label=None): - if data_label: - return [ls for ls in self.state.layers if (ls.layer.data.label == data_label and - self._is_spatial_subset(ls))] - return [ls for ls in self.state.layers if self._is_spatial_subset(ls)] - - def _is_spectral_subset(self, layer): - subset_state = getattr(layer.layer, 'subset_state', None) - return get_subset_type(subset_state) == 'spectral' - - def _get_spectral_subset_layers(self, data_label=None): - if data_label: - return [ls for ls in self.state.layers if (ls.layer.data.label == data_label and - self._is_spectral_subset(ls))] - return [ls for ls in self.state.layers if self._is_spectral_subset(ls)] - - def _get_marks_for_layers(self, layers): - layers_list = list(self.state.layers) - # here we'll assume that all custom marks are subclasses of Lines/GL but don't directly - # use Lines/LinesGL (so an isinstance check won't be sufficient here) - layer_marks = self.native_marks - # and now we'll assume that the marks are in the same order as the layers, this should - # be the case as long as the order isn't manually resorted. If for any reason the layer - # is added but the mark has not yet been created, this will ignore that entry rather than - # raising an IndexError. 
- inds = [layers_list.index(layer) for layer in layers] - return [layer_marks[ind] for ind in inds if ind < len(layer_marks)] - - def _on_subset_delete(self, msg): - # delete any ShadowSpatialSpectral mark for which either of the spectral or spatial marks - # no longer exists by matching the uuid of the msg subset to the uuid of the subsets - # in ShadowSpatialSpectral - super()._on_subset_delete(msg) - self.figure.marks = [m for m in self.figure.marks - if not (isinstance(m, ShadowSpatialSpectral) - and msg.subset.uuid in [m.spatial_uuid, m.spectral_uuid])] - - def _expected_subset_layer_default(self, layer_state): - """ - This gets called whenever the layer of an expected new subset is added, we want to set the - default for the linewidth depending on whether it is spatial or spectral, and handle - creating any necessary marks for spatial-spectral subset intersections. - """ - def _marks_are_same(m, other): - # Check if ShadowSpatialSpectral mark already exists for particular - # data, spatial subset, and spectral subset combo - if (m.data_uuid == other.data_uuid - and m.spatial_uuid == other.spatial_uuid - and m.spectral_uuid == other.spectral_uuid): - return True - return False - - def _is_unique(m): - unique = True - for m in existing_shadows_for_data: - if _marks_are_same(m, new_shadow): - unique = False - break - return unique - - super()._expected_subset_layer_default(layer_state) - subset_type = get_subset_type(layer_state.layer) - if subset_type is None: - return - - this_mark = self._get_marks_for_layers([layer_state])[0] - # new ShadowSpatialSpectral marks to be added - new_marks = [] - # ShadowSpatialSpectral marks that already exists in the viewer - existing_shadows_for_data = [m for m in self.figure.marks - if isinstance(m, ShadowSpatialSpectral)] - if subset_type == 'spatial': - layer_state.linewidth = 1 - - # need to add marks for every intersection between THIS spatial subset and ALL spectral - # subset marks corresponding to this data - spectral_layers = [sub_layer for sub_layer in - self._get_spectral_subset_layers(layer_state.layer.data.label)] - spectral_marks = self._get_marks_for_layers(spectral_layers) - - for index, spectral_mark in enumerate(spectral_marks): - new_shadow = ShadowSpatialSpectral(spatial_spectrum_mark=this_mark, - spectral_subset_mark=spectral_mark, - spatial_uuid=layer_state.layer.uuid, - spectral_uuid=spectral_layers[index].layer.uuid, - data_uuid=layer_state.layer.data.uuid) - if _is_unique(new_shadow): - new_marks.append(new_shadow) - - # change opacity for live-collapsed spectra from spatial subsets in Cubeviz: - spatial_layers = [sub_layer for sub_layer in - self._get_spatial_subset_layers(layer_state.layer.data.label)] - spatial_marks = self._get_marks_for_layers(spatial_layers) - for layer, mark in zip(spatial_layers, spatial_marks): - # update profile opacities for spatial subset: - if isinstance(mark, Lines): - mark.set_trait( - 'opacities', - # set the alpha for the spectrum in the profile viewer - # to be 50% more opaque than the alpha for the spatial subset - # in the flux-viewer - [min(1.5 * layer.alpha, 1)] - ) - - elif subset_type == 'spectral': - # need to add marks for every intersection between THIS spectral subset and ALL spatial - # subset marks corresponding to this data - spatial_layers = [sub_layer for sub_layer in - self._get_spatial_subset_layers(layer_state.layer.data.label)] - spatial_marks = self._get_marks_for_layers(spatial_layers) - for index, spatial_mark in enumerate(spatial_marks): - new_shadow = 
ShadowSpatialSpectral(spatial_spectrum_mark=spatial_mark, - spectral_subset_mark=this_mark, - spatial_uuid=spatial_layers[index].layer.uuid, - spectral_uuid=layer_state.layer.uuid, - data_uuid=layer_state.layer.data.uuid) - if _is_unique(new_shadow): - new_marks.append(new_shadow) - - else: - return - # NOTE: += or append won't pick up on change - self.figure.marks = self.figure.marks + new_marks diff --git a/jdaviz/configs/default/plugins/collapse/tests/test_collapse.py b/jdaviz/configs/default/plugins/collapse/tests/test_collapse.py index a35a30e9b0..d140afaeed 100644 --- a/jdaviz/configs/default/plugins/collapse/tests/test_collapse.py +++ b/jdaviz/configs/default/plugins/collapse/tests/test_collapse.py @@ -20,22 +20,22 @@ def test_linking_after_collapse(cubeviz_helper, spectral_cube_wcs): coll.vue_collapse() assert coll.results_label_overwrite is True - assert len(dc) == 2 - assert dc[1].label == 'collapsed' - assert len(dc.external_links) == 2 + assert len(dc) == 3 + assert dc[2].label == 'collapsed' + assert len(dc.external_links) == 4 # Link 3D z to 2D x and 3D y to 2D y # Link 1: # Pixel Axis 0 [z] from cube.pixel_component_ids[0] # Pixel Axis 1 [x] from plugin.pixel_component_ids[1] - assert dc.external_links[0].cids1[0] == dc[0].pixel_component_ids[0] - assert dc.external_links[0].cids2[0] == dc[-1].pixel_component_ids[1] + assert dc.external_links[2].cids1[0] == dc[0].pixel_component_ids[0] + assert dc.external_links[2].cids2[0] == dc[-1].pixel_component_ids[1] # Link 2: # Pixel Axis 1 [y] from cube.pixel_component_ids[1] # Pixel Axis 0 [y] from plugin.pixel_component_ids[0] - assert dc.external_links[1].cids1[0] == dc[0].pixel_component_ids[1] - assert dc.external_links[1].cids2[0] == dc[-1].pixel_component_ids[0] + assert dc.external_links[3].cids1[0] == dc[0].pixel_component_ids[1] + assert dc.external_links[3].cids2[0] == dc[-1].pixel_component_ids[0] def test_save_collapsed_to_fits(cubeviz_helper, spectral_cube_wcs, tmp_path): diff --git a/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.py b/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.py index 9e83cc2abb..edf226d5c7 100644 --- a/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.py +++ b/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.py @@ -44,7 +44,6 @@ class GaussianSmooth(PluginTemplateMixin, DatasetSelectMixin, AddResultsMixin): """ template_file = __file__, "gaussian_smooth.vue" stddev = FloatHandleEmpty(1).tag(sync=True) - selected_data_is_1d = Bool(True).tag(sync=True) show_modes = Bool(False).tag(sync=True) mode_items = List().tag(sync=True) mode_selected = Unicode().tag(sync=True) @@ -55,7 +54,6 @@ def __init__(self, *args, **kwargs): if self.config == "cubeviz": self.docs_description = 'Smooth data cube spatially or spectrally with a Gaussian kernel.' 
# noqa
             self.show_modes = True

-        # retrieve the data from the cube, not the collapsed 1d spectrum
         self.dataset._viewers = [
             self._default_flux_viewer_reference_name,
             self._default_spectrum_viewer_reference_name
@@ -74,8 +72,8 @@ def __init__(self, *args, **kwargs):
                                          selected='mode_selected',
                                          manual_options=['Spectral', 'Spatial'])

-        # set the filter on the viewer options
-        self._update_viewer_filters()
+        # set the filter on the dataset and viewer options
+        self._update_dataset_viewer_filters()

     @property
     def _default_spectrum_viewer_reference_name(self):
@@ -115,23 +113,30 @@ def _set_default_results_label(self, event={}):
             self.app.return_data_label(f"{dataset}{smooth_type} {stddev}", check_unique=False))

     @observe("dataset_selected")
-    def _on_data_selected(self, event={}):
+    def _update_viewer_filters(self, event={}):
         if not hasattr(self, 'dataset'):
             # during initial init, this can trigger before the component is initialized
             return

-        # NOTE: if this is ever used anywhere else, it should be moved into DatasetSelect
         if self.dataset.selected_dc_item is not None:
-            self.selected_data_is_1d = len(self.dataset.selected_dc_item.data.shape) == 1
+            selected_data_is_1d = len(self.dataset.selected_dc_item.data.shape) == 1
+
+            if selected_data_is_1d:
+                # only want spectral viewers in the options
+                self.add_results.viewer.filters = ['is_spectrum_viewer']
+            else:
+                # only want image viewers in the options
+                self.add_results.viewer.filters = ['is_image_viewer']

     @observe("mode_selected")
-    def _update_viewer_filters(self, event={}):
+    def _update_dataset_viewer_filters(self, event={}):
         if event.get('new', self.mode_selected) == 'Spatial':
-            # only want image viewers in the options
-            self.add_results.viewer.filters = ['is_image_viewer']
+            # MUST be a cube
+            self.dataset.add_filter('layer_in_flux_viewer')
         else:
-            # only want spectral viewers in the options
-            self.add_results.viewer.filters = ['is_spectrum_viewer']
+            # can either be a cube OR a spectrum
+            self.dataset.remove_filter('layer_in_flux_viewer')
+        self._update_viewer_filters()

     def vue_apply(self, event={}):
         self.smooth(add_data=True)

diff --git a/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.vue b/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.vue
index 85b1345d0d..08353c7c30 100644
--- a/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.vue
+++ b/jdaviz/configs/default/plugins/gaussian_smooth/gaussian_smooth.vue
@@ -5,16 +5,6 @@
     :popout_button="popout_button"
     :scroll_to.sync="scroll_to">

    [vue template markup for this hunk was not recoverable from the extract; only bare +/- markers survived, so the changed template lines are omitted here]
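
For reference, a rough usage sketch of the reworked Gaussian Smooth flow under these changes. This is not part of the diff; it assumes the plugin's user API exposes ``dataset``, ``mode``, ``stddev``, and ``smooth()``, and the cube label ``mydata`` is hypothetical. The ``"Spectrum (sum)"`` label is the auto-extracted spectrum asserted in the tests above.

.. code-block:: python

    import numpy as np
    from jdaviz import Cubeviz

    cubeviz = Cubeviz()
    # hypothetical cube; loading any cube also triggers the automatic "Spectrum (sum)" extraction
    cubeviz.load_data(np.random.random((10, 8, 6)), data_label='mydata')

    gs = cubeviz.plugins['Gaussian Smooth']

    # 'Spectral' mode now smooths a 1D spectrum chosen through ``dataset``
    # (the old ``spatial_subset`` argument is gone); the auto-extracted
    # "Spectrum (sum)" is a valid choice here.
    gs.mode = 'Spectral'
    gs.dataset = 'Spectrum (sum)'
    gs.stddev = 2
    gs.smooth(add_data=True)  # assumes ``smooth`` is exposed on the user API

    # Switching to 'Spatial' mode re-applies the 'layer_in_flux_viewer' dataset
    # filter, so only cube entries remain selectable (exact labels depend on the parser).
    gs.mode = 'Spatial'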