Skip to content

Commit

Permalink
rename aux_dict -> auxiliary_images
Browse files Browse the repository at this point in the history
Depends on #193
  • Loading branch information
Tony Tung authored and ttung committed Jun 1, 2018
1 parent a415ef3 commit aae6ab5
Show file tree
Hide file tree
Showing 15 changed files with 95 additions and 1,361 deletions.
12 changes: 6 additions & 6 deletions notebooks/ISS_Pipeline_-_Breast_-_1_FOV.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@
"metadata": {},
"outputs": [],
"source": [
"image(s.aux_dict['dots'])"
"image(s.auxiliary_images['dots'])"
]
},
{
Expand All @@ -104,7 +104,7 @@
"metadata": {},
"outputs": [],
"source": [
"image(s.aux_dict['nuclei'])"
"image(s.auxiliary_images['nuclei'])"
]
},
{
Expand Down Expand Up @@ -156,7 +156,7 @@
"\n",
"# filter 'dots' auxiliary file\n",
"print(\"filtering dots\")\n",
"dots_filt = white_top_hat(s.aux_dict['dots'], disk_size)\n",
"dots_filt = white_top_hat(s.auxiliary_images['dots'], disk_size)\n",
"\n",
"# convert the unstacked data back into a tensor\n",
"s.set_stack(s.un_squeeze(stack_filt))\n",
Expand Down Expand Up @@ -385,7 +385,7 @@
"stain = stain/stain.max()\n",
"\n",
"\n",
"seg = WatershedSegmenter(s.aux_dict['nuclei'], stain) # uses skimage watershed. \n",
"seg = WatershedSegmenter(s.auxiliary_images['nuclei'], stain) # uses skimage watershed.\n",
"cells_labels = seg.segment(dapi_thresh, stain_thresh, size_lim, disk_size_markers, disk_size_mask, min_dist)\n",
"seg.show()"
]
Expand All @@ -412,8 +412,8 @@
"results = pd.merge(res, p.spots_df_viz, on='spot_id', how='left')\n",
"\n",
"rgb = np.zeros(s.image.tile_shape + (3,))\n",
"rgb[:,:,0] = s.aux_dict['nuclei']\n",
"rgb[:,:,1] = s.aux_dict['dots']\n",
"rgb[:,:,0] = s.auxiliary_images['nuclei']\n",
"rgb[:,:,1] = s.auxiliary_images['dots']\n",
"do = rgb2gray(rgb)\n",
"do = do/(do.max())\n",
"\n",
Expand Down
12 changes: 6 additions & 6 deletions notebooks/ISS_Pipeline_-_Breast_-_1_FOV.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,15 +56,15 @@
# EPY: END markdown

# EPY: START code
image(s.aux_dict['dots'])
image(s.auxiliary_images['dots'])
# EPY: END code

# EPY: START markdown
# Below is a DAPI auxiliary image, which specifically marks nuclei. This is useful for cell segmentation later on in the processing.
# EPY: END markdown

# EPY: START code
image(s.aux_dict['nuclei'])
image(s.auxiliary_images['nuclei'])
# EPY: END code

# EPY: START markdown
Expand Down Expand Up @@ -97,7 +97,7 @@

# filter 'dots' auxiliary file
print("filtering dots")
dots_filt = white_top_hat(s.aux_dict['dots'], disk_size)
dots_filt = white_top_hat(s.auxiliary_images['dots'], disk_size)

# convert the unstacked data back into a tensor
s.set_stack(s.un_squeeze(stack_filt))
Expand Down Expand Up @@ -249,7 +249,7 @@
stain = stain/stain.max()


seg = WatershedSegmenter(s.aux_dict['nuclei'], stain) # uses skimage watershed.
seg = WatershedSegmenter(s.auxiliary_images['nuclei'], stain) # uses skimage watershed.
cells_labels = seg.segment(dapi_thresh, stain_thresh, size_lim, disk_size_markers, disk_size_mask, min_dist)
seg.show()
# EPY: END code
Expand All @@ -268,8 +268,8 @@
results = pd.merge(res, p.spots_df_viz, on='spot_id', how='left')

rgb = np.zeros(s.image.tile_shape + (3,))
rgb[:,:,0] = s.aux_dict['nuclei']
rgb[:,:,1] = s.aux_dict['dots']
rgb[:,:,0] = s.auxiliary_images['nuclei']
rgb[:,:,1] = s.auxiliary_images['dots']
do = rgb2gray(rgb)
do = do/(do.max())

Expand Down
707 changes: 37 additions & 670 deletions notebooks/ISS_Simple_tutorial_-_Mouse_vs._Human_Fibroblasts.ipynb

Large diffs are not rendered by default.

20 changes: 10 additions & 10 deletions notebooks/ISS_Simple_tutorial_-_Mouse_vs._Human_Fibroblasts.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
# EPY: END code

# EPY: START code
image(s.aux_dict['dots'], size=10)
image(s.auxiliary_images['dots'], size=10)
# EPY: END code

# EPY: START markdown
Expand Down Expand Up @@ -68,7 +68,7 @@
stack_filt = s.un_squeeze(stack_filt)

# filter dots
dots_filt = white_top_hat(s.aux_dict['dots'], disk_dize)
dots_filt = white_top_hat(s.auxiliary_images['dots'], disk_dize)

# create a 'stain' for segmentation
stain = np.mean(s.max_proj('ch'), axis=0)
Expand All @@ -81,8 +81,8 @@

# visualize
tile(s.squeeze(), bar=False, size=10);
image(s.aux_dict['dots'])
image(s.aux_dict['stain'])
image(s.auxiliary_images['dots'])
image(s.auxiliary_images['stain'])
# EPY: END code

# EPY: START markdown
Expand Down Expand Up @@ -126,7 +126,7 @@
disk_size_mask = None
min_dist = 57

seg = WatershedSegmenter(s.aux_dict['dapi'], s.aux_dict['stain'])
seg = WatershedSegmenter(s.auxiliary_images['dapi'], s.auxiliary_images['stain'])
cells_labels = seg.segment(dapi_thresh, stain_thresh, size_lim, disk_size_markers, disk_size_mask, min_dist)
seg.show()
# EPY: END code
Expand Down Expand Up @@ -174,11 +174,11 @@
dec_filt = pd.merge(dec, spots_viz, on='spot_id',how='left')
dec_filt = dec_filt[dec_filt.qual>.25]

assert s.aux_dict['dapi'].shape == s.aux_dict['dots'].shape
assert s.auxiliary_images['dapi'].shape == s.auxiliary_images['dots'].shape

rgb = np.zeros(s.aux_dict['dapi'].shape + (3,))
rgb[:,:,0] = s.aux_dict['dapi']
rgb[:,:,1] = s.aux_dict['dots']
rgb = np.zeros(s.auxiliary_images['dapi'].shape + (3,))
rgb[:,:,0] = s.auxiliary_images['dapi']
rgb[:,:,1] = s.auxiliary_images['dots']
do = rgb2gray(rgb)
do = do/(do.max())

Expand All @@ -193,7 +193,7 @@
v = pd.merge(spots_viz, ass, on='spot_id')

r = label_to_regions(cells_labels)
im = r.mask(background=[0.9, 0.9, 0.9], dims=s.aux_dict['dots'].shape, stroke=None, cmap='rainbow')
im = r.mask(background=[0.9, 0.9, 0.9], dims=s.auxiliary_images['dots'].shape, stroke=None, cmap='rainbow')
image(im,size=10)

v_ass = v[~v.cell_id.isnull()]
Expand Down
659 changes: 13 additions & 646 deletions notebooks/Starfish_simulation.ipynb

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions starfish/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def __init__(self):
self.image = None

# auxiliary images
self.aux_dict = dict()
self.auxiliary_images = dict()

# readers and writers
self.write_fn = np.save # asserted for now
Expand All @@ -41,7 +41,7 @@ def read(self, in_json_path_or_url):

def _read_aux(self):
    """Load every auxiliary image declared in the experiment manifest.

    Iterates the 'auxiliary_images' mapping of self.org and stores an
    ImageStack (fetched relative to self.baseurl) under the same key in
    self.auxiliary_images.
    """
    # NOTE(review): the scraped diff showed both the pre- and post-rename
    # assignment; only the renamed `auxiliary_images` line is kept here,
    # since `aux_dict` no longer exists after this commit.
    for aux_key, aux_data in self.org['auxiliary_images'].items():
        self.auxiliary_images[aux_key] = ImageStack.from_url(aux_data, self.baseurl)

@classmethod
def from_experiment_json(cls, json_url: str):
Expand Down Expand Up @@ -79,7 +79,7 @@ def _write_stack(self, dir_name):

def _write_aux(self, dir_name):
    """Write every auxiliary image to disk under *dir_name*.

    For each entry in the manifest's 'auxiliary_images' mapping, the
    matching ImageStack in self.auxiliary_images is written to
    os.path.join(dir_name, <relative path from the manifest>).
    """
    # NOTE(review): stale pre-rename `aux_dict` line from the diff view
    # removed; only the renamed attribute is valid after this commit.
    for aux_key, aux_data in self.org['auxiliary_images'].items():
        self.auxiliary_images[aux_key].write(os.path.join(dir_name, aux_data))

def set_stack(self, new_stack):
if self.image.raw_shape != new_stack.shape:
Expand All @@ -89,13 +89,13 @@ def set_stack(self, new_stack):
self.image.numpy_array = new_stack

def set_aux(self, key, img):
if key in self.aux_dict:
old_img = self.aux_dict[key]
if key in self.auxiliary_images:
old_img = self.auxiliary_images[key]
if old_img.shape != img.shape:
msg = "Shape mismatch. Current data shape: {}, new data shape: {}".format(
old_img.shape, img.shape)
raise AttributeError(msg)
self.aux_dict[key].numpy_array = img
self.auxiliary_images[key].numpy_array = img
else:
# TODO: (ttung) major hack alert. we don't have a convenient mechanism to build an ImageStack from a single
# numpy array, which we probably should.
Expand Down Expand Up @@ -128,7 +128,7 @@ def set_aux(self, key, img):
tile.numpy_array = img
tileset.add_tile(tile)

self.aux_dict[key] = ImageStack(tileset)
self.auxiliary_images[key] = ImageStack(tileset)
self.org['auxiliary_images'][key] = f"{key}.json"

def max_proj(self, *dims):
Expand Down
4 changes: 2 additions & 2 deletions starfish/pipeline/features/spots/detector/gaussian.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def __init__(self, min_sigma, max_sigma, num_sigma, threshold, blobs_image_name,
than thresh are ignored. Reduce this to detect blobs with lower
intensities.
blobs_image_name : str
name of the image containing blobs. Must be present in the aux_dict of the Stack passed to `find`
name of the image containing blobs. Must be present in the auxiliary_images of the Stack passed to `find`
measurement_type : str ['max', 'mean']
name of the function used to calculate the intensity for each identified spot area
Expand Down Expand Up @@ -97,7 +97,7 @@ def fit(self, blobs_image):
return SpotAttributes(fitted_blobs)

def find(self, image_stack) -> Tuple[SpotAttributes, EncodedSpots]:
    """Detect spots in the stack's blobs image and encode them.

    Max-projects the auxiliary image named by self.blobs across the hyb,
    channel, and z dimensions, fits Gaussian blobs on that projection,
    then encodes the detected spots against the full image stack.

    Returns a tuple of (spot attributes, encoded spots).
    """
    # NOTE(review): stale pre-rename `aux_dict` lookup from the diff view
    # removed; `auxiliary_images` is the attribute name after this commit.
    blobs = image_stack.auxiliary_images[self.blobs].max_proj(Indices.HYB, Indices.CH, Indices.Z)
    spot_attributes = self.fit(blobs)
    encoded_spots = self.encode(image_stack, spot_attributes.data)
    return spot_attributes, encoded_spots
Expand Down
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/bandpass.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,5 +67,5 @@ def filter(self, stack) -> None:
stack.image.apply(bandpass_)

# apply to aux dict too:
for k, val in stack.aux_dict.items():
stack.aux_dict[k].apply(bandpass_)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(bandpass_)
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/clip.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,5 +74,5 @@ def filter(self, stack) -> None:
stack.image.apply(clip)

# apply to aux dict too:
for k, val in stack.aux_dict.items():
stack.aux_dict[k].apply(clip)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(clip)
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/gaussian_high_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,5 +72,5 @@ def filter(self, stack: Stack) -> None:
stack.image.apply(high_pass)

# apply to aux dict too:
for k, val in stack.aux_dict.items():
stack.aux_dict[k].apply(high_pass)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(high_pass)
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/gaussian_low_pass.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,5 +69,5 @@ def filter(self, stack) -> None:
stack.image.apply(low_pass)

# apply to aux dict too:
for k, val in stack.aux_dict.items():
stack.aux_dict[k].apply(low_pass)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(low_pass)
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/richardson_lucy_deconvolution.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,5 +87,5 @@ def filter(self, stack: Stack) -> None:
func: Callable = partial(self.richardson_lucy_deconv, num_iter=self.num_iter, psf=self.psf, clip=self.clip)
stack.image.apply(func)

for k, val in stack.aux_dict.items():
stack.aux_dict[k].apply(func)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(func)
4 changes: 2 additions & 2 deletions starfish/pipeline/filter/white_tophat.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,5 +53,5 @@ def white_tophat(image):
stack.image.apply(white_tophat)

# apply to aux dict too.
for aux_img in stack.aux_dict.values():
aux_img.apply(white_tophat)
for auxiliary_image in stack.auxiliary_images.values():
auxiliary_image.apply(white_tophat)
2 changes: 1 addition & 1 deletion starfish/pipeline/registration/fourier_shift.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def add_arguments(cls, group_parser):
def register(self, stack):
# TODO: (ambrosejcarr) is this the appropriate way of dealing with Z in registration?
mp = stack.max_proj(Indices.CH, Indices.Z)
dots = stack.aux_dict['dots'].max_proj(Indices.HYB, Indices.CH, Indices.Z)
dots = stack.auxiliary_images['dots'].max_proj(Indices.HYB, Indices.CH, Indices.Z)

for h in range(stack.image.num_hybs):
# compute shift between maximum projection (across channels) and dots, for each hyb round
Expand Down
2 changes: 1 addition & 1 deletion starfish/pipeline/segmentation/watershed.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def segment(self, stack):
disk_size_markers = None
disk_size_mask = None

nuclei = stack.aux_dict['nuclei'].max_proj(Indices.HYB, Indices.CH, Indices.Z)
nuclei = stack.auxiliary_images['nuclei'].max_proj(Indices.HYB, Indices.CH, Indices.Z)
seg = WatershedSegmenter(nuclei, stain)
cells_labels = seg.segment(
self.dapi_threshold, self.input_threshold, size_lim, disk_size_markers, disk_size_mask, self.min_distance)
Expand Down

0 comments on commit aae6ab5

Please sign in to comment.