Skip to content

Commit

Permalink
Change logger statements in spot finding to use f-strings to ensure logger info is always printed correctly.
Browse files Browse the repository at this point in the history
  • Loading branch information
toastisme committed Jun 14, 2024
1 parent b7bcc78 commit a42b8dc
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 33 deletions.
1 change: 1 addition & 0 deletions newsfragments/XXX.bugfix
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Change logger statements in spot finding to use f-strings to ensure logger info is always printed correctly.
8 changes: 4 additions & 4 deletions src/dials/algorithms/spot_finding/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ def __call__(self, flags, **kwargs):
flags = self.run(flags, **kwargs)
num_after = flags.count(True)
logger.info(
"Filtered %d of %d spots by peak-centroid distance", num_after, num_before
f"Filtered {num_after} of {num_before} spots by peak-centroid distance"
)
return flags

Expand Down Expand Up @@ -332,7 +332,7 @@ def __call__(self, flags, **kwargs):
flags = self.run(flags, **kwargs)
num_after = flags.count(True)
logger.info(
"Filtered %d of %d spots by background gradient", num_after, num_before
f"Filtered {num_after} of {num_before} spots by background gradient"
)
return flags

Expand Down Expand Up @@ -380,7 +380,7 @@ def run(self, flags, sequence=None, observations=None, **kwargs): # noqa: U100
cutoff = hist.slot_centers()[i - 1] - 0.5 * hist.slot_width()

sel = np.column_stack(np.where(H > cutoff))
for (ix, iy) in sel:
for ix, iy in sel:
flags.set_selected(
(
(obs_x > xedges[ix])
Expand All @@ -398,7 +398,7 @@ def __call__(self, flags, **kwargs):
num_before = flags.count(True)
flags = self.run(flags, **kwargs)
num_after = flags.count(True)
logger.info("Filtered %d of %d spots by spot density", num_after, num_before)
logger.info(f"Filtered {num_after} of {num_before} spots by spot density")
return flags


Expand Down
46 changes: 17 additions & 29 deletions src/dials/algorithms/spot_finding/finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,9 +86,7 @@ def __call__(self, index):
mask = tuple(m1 & m2 for m1, m2 in zip(mask, self.mask))

logger.debug(
"Number of masked pixels for image %i: %i",
index,
sum(m.count(False) for m in mask),
f"Number of masked pixels for image {index}: {sum(m.count(False) for m in mask)}",
)

# Add the images to the pixel lists
Expand Down Expand Up @@ -154,13 +152,10 @@ def __call__(self, index):
# Print some info
if self.compute_mean_background:
logger.info(
"Found %d strong pixels on image %d with average background %f",
num_strong,
frame + 1,
average_background,
f"Found {num_strong} strong pixels on image {frame + 1} with average background {average_background}",
)
else:
logger.info("Found %d strong pixels on image %d", num_strong, frame + 1)
logger.info(f"Found {num_strong} strong pixels on image {frame + 1}")

# Return the result
return pixel_list
Expand Down Expand Up @@ -295,16 +290,16 @@ def pixel_list_to_shoeboxes(
shoeboxes.extend(creator.result())
spotsizes.extend(creator.spot_size())
hp.extend(creator.hot_pixels())
logger.info("\nExtracted %d spots", len(shoeboxes))
logger.info(f"\nExtracted {len(shoeboxes)} spots")

# Get the unallocated spots and print some info
selection = shoeboxes.is_allocated()
shoeboxes = shoeboxes.select(selection)
ntoosmall = (spotsizes < min_spot_size).count(True)
ntoolarge = (spotsizes > max_spot_size).count(True)
assert ntoosmall + ntoolarge == selection.count(False)
logger.info("Removed %d spots with size < %d pixels", ntoosmall, min_spot_size)
logger.info("Removed %d spots with size > %d pixels", ntoolarge, max_spot_size)
logger.info(f"Removed {ntoosmall} spots with size < {min_spot_size} pixels")
logger.info(f"Removed {ntoolarge} spots with size > {max_spot_size} pixels")

# Return the shoeboxes
return shoeboxes, hotpixels
Expand All @@ -316,11 +311,11 @@ def shoeboxes_to_reflection_table(
"""Filter shoeboxes and create reflection table"""
# Calculate the spot centroids
centroid = shoeboxes.centroid_valid()
logger.info("Calculated %d spot centroids", len(shoeboxes))
logger.info(f"Calculated {len(shoeboxes)} spot centroids")

# Calculate the spot intensities
intensity = shoeboxes.summed_intensity()
logger.info("Calculated %d spot intensities", len(shoeboxes))
logger.info(f"Calculated {len(shoeboxes)} spot intensities")

# Create the observations
observed = flex.observation(shoeboxes.panels(), centroid, intensity)
Expand Down Expand Up @@ -459,7 +454,7 @@ def _find_spots(self, imageset):
mp_chunksize = self._compute_chunksize(
len(imageset), mp_njobs * mp_nproc, self.min_chunksize
)
logger.info("Setting chunksize=%i", mp_chunksize)
logger.info(f"Setting chunksize={mp_chunksize}")

len_by_nproc = int(math.floor(len(imageset) / (mp_njobs * mp_nproc)))
if mp_chunksize > len_by_nproc:
Expand Down Expand Up @@ -492,13 +487,10 @@ def _find_spots(self, imageset):
logger.info("Extracting strong pixels from images")
if mp_njobs > 1:
logger.info(
" Using %s with %d parallel job(s) and %d processes per node\n",
mp_method,
mp_njobs,
mp_nproc,
f" Using {mp_method} with {mp_njobs} parallel job(s) and {mp_nproc} processes per node\n"
)
else:
logger.info(" Using multiprocessing with %d parallel job(s)\n", mp_nproc)
logger.info(f" Using multiprocessing with {mp_nproc} parallel job(s)\n")
if mp_nproc > 1 or mp_njobs > 1:

def process_output(result):
Expand Down Expand Up @@ -555,7 +547,7 @@ def _find_spots_2d_no_shoeboxes(self, imageset):
mp_chunksize = self._compute_chunksize(
len(imageset), mp_njobs * mp_nproc, self.min_chunksize
)
logger.info("Setting chunksize=%i", mp_chunksize)
logger.info(f"Setting chunksize={mp_chunksize}")

len_by_nproc = int(math.floor(len(imageset) / (mp_njobs * mp_nproc)))
if mp_chunksize > len_by_nproc:
Expand Down Expand Up @@ -588,13 +580,10 @@ def _find_spots_2d_no_shoeboxes(self, imageset):
logger.info("Extracting strong spots from images")
if mp_njobs > 1:
logger.info(
" Using %s with %d parallel job(s) and %d processes per node\n",
mp_method,
mp_njobs,
mp_nproc,
f" Using {mp_method} with {mp_njobs} parallel job(s) and {mp_nproc} processes per node\n"
)
else:
logger.info(" Using multiprocessing with %d parallel job(s)\n", mp_nproc)
logger.info(f" Using multiprocessing with {mp_nproc} parallel job(s)\n")
if mp_nproc > 1 or mp_njobs > 1:

def process_output(result):
Expand Down Expand Up @@ -701,7 +690,7 @@ def find_spots(self, experiments: ExperimentList) -> flex.reflection_table:

# Find the strong spots in the sequence
logger.info(
"-" * 80 + "\nFinding strong spots in imageset %d\n" + "-" * 80, j
"-" * 80 + f"\nFinding strong spots in imageset {j}\n" + "-" * 80
)
table, hot_mask = self._find_spots_in_imageset(imageset)

Expand Down Expand Up @@ -811,7 +800,7 @@ def _find_spots_in_imageset(self, imageset):
)
)

logger.info("\nFinding spots in image %s to %s...", j0, j1)
logger.info(f"\nFinding spots in image {j0} to {j1}...")
j0 -= 1
if isinstance(imageset, ImageSequence):
j0 -= imageset.get_array_range()[0]
Expand Down Expand Up @@ -849,7 +838,7 @@ def _create_hot_mask(self, imageset, hot_pixels):
for i in range(len(hp)):
hm[hp[i]] = False
num_hot += len(hp)
logger.info("Found %d possible hot pixel(s)", num_hot)
logger.info(f"Found {num_hot} possible hot pixel(s)")

else:
hot_mask = None
Expand Down Expand Up @@ -913,7 +902,6 @@ def __init__(
self.experiments = experiments

def _correct_centroid_tof(self, reflections):

"""
Sets the centroid of the spot to the peak position along the
time of flight, as this tends to more accurately represent the true
Expand Down

0 comments on commit a42b8dc

Please sign in to comment.