Skip to content

Commit 575d364

Browse files
committed
Updates to figures, including customization of montaging.
Right now only seg=coarse uses a custom setting.
1 parent c54f523 commit 575d364

5 files changed

Lines changed: 65 additions & 72 deletions

File tree

spimquant/workflow/rules/qc.smk

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ saturation/clip fraction (percentage of voxels at the maximum bin).
4646
suffix="histogram.png",
4747
**inputs["spim"].wildcards,
4848
),
49-
threads: 8
49+
threads: 4
5050
resources:
5151
mem_mb=16000,
5252
runtime=30,
@@ -88,7 +88,7 @@ Aspect ratio is corrected using voxel spacings from ``ZarrNii.get_zooms()``.
8888
suffix="segslices.png",
8989
**inputs["spim"].wildcards,
9090
),
91-
threads: 8
91+
threads: 4
9292
resources:
9393
mem_mb=16000,
9494
runtime=30,
@@ -127,7 +127,7 @@ for isotropic display and physically correct aspect ratio.
127127
suffix="vesselslices.png",
128128
**inputs["spim"].wildcards,
129129
),
130-
threads: 8
130+
threads: 4
131131
resources:
132132
mem_mb=16000,
133133
runtime=30,
@@ -185,13 +185,14 @@ quality within individual brain regions.
185185
suffix="roimontage.png",
186186
**inputs["spim"].wildcards,
187187
),
188-
threads: 32
188+
threads: 4
189189
resources:
190190
mem_mb=32000,
191191
runtime=15,
192192
params:
193-
max_rois=25,
194-
n_cols=5,
193+
max_rois=lambda wildcards: 25 if wildcards.seg == "coarse" else 100,
194+
n_cols=lambda wildcards: 5 if wildcards.seg == "coarse" else 10,
195+
patch_size=lambda wildcards: 2000 if wildcards.seg == "coarse" else 500,
195196
level=config["segmentation_level"],
196197
script:
197198
"../scripts/qc_segmentation_roi_zoom.py"
@@ -241,13 +242,14 @@ ZarrNiiAtlas for atlas-based ROI cropping.
241242
suffix="vesselroimontage.png",
242243
**inputs["spim"].wildcards,
243244
),
244-
threads: 32
245+
threads: 4
245246
resources:
246247
mem_mb=32000,
247248
runtime=15,
248249
params:
249-
max_rois=25,
250-
n_cols=5,
250+
max_rois=lambda wildcards: 25 if wildcards.seg == "coarse" else 100,
251+
n_cols=lambda wildcards: 5 if wildcards.seg == "coarse" else 10,
252+
patch_size=lambda wildcards: 2000 if wildcards.seg == "coarse" else 500,
251253
level=config["segmentation_level"],
252254
script:
253255
"../scripts/qc_segmentation_roi_zoom.py"

spimquant/workflow/scripts/qc_intensity_histogram.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def main():
2424
hist_bins = snakemake.params.hist_bins
2525
hist_range = snakemake.params.hist_range
2626

27-
with get_dask_client(snakemake.config["dask_scheduler"], snakemake.threads):
27+
with get_dask_client("threads", snakemake.threads):
2828
znimg = ZarrNii.from_ome_zarr(
2929
snakemake.input.spim,
3030
level=level,

spimquant/workflow/scripts/qc_roi_summary.py

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
33
Loads the merged segmentation-statistics TSV (all stains) together with
44
the atlas label table, then produces bar-chart visualisations of the
5-
top brain-regions ranked by field fraction and count for each stain.
5+
top brain-regions ranked by field fraction and density for each stain.
66
77
This is a Snakemake script that expects the ``snakemake`` object to be
88
available, which is automatically provided when executed as part of a
@@ -19,7 +19,6 @@
1919
# Suffixes used to identify stain-prefixed metric columns in mergedsegstats TSV.
2020
# Columns follow the pattern "{stain}+{metric}", e.g. "Abeta+fieldfrac".
2121
_SUFFIX_FIELDFRAC = "+fieldfrac"
22-
_SUFFIX_COUNT = "+count"
2322
_SUFFIX_DENSITY = "+density"
2423

2524

@@ -62,15 +61,13 @@ def main():
6261

6362
# Identify stain-prefixed metric columns (pattern: "{stain}+{metric}")
6463
ff_cols = [c for c in stats_df.columns if c.endswith(_SUFFIX_FIELDFRAC)]
65-
count_cols = [c for c in stats_df.columns if c.endswith(_SUFFIX_COUNT)]
6664
density_cols = [c for c in stats_df.columns if c.endswith(_SUFFIX_DENSITY)]
6765

68-
# Determine number of rows: 1 row per metric type (ff, count, density)
66+
# Determine number of rows: 1 row per metric type (ff, density)
6967
# with one subplot per stain within each row
7068
n_ff = len(ff_cols)
71-
n_count = len(count_cols)
7269
n_density = len(density_cols)
73-
n_rows = (1 if n_ff else 0) + (1 if n_count else 0) + (1 if n_density else 0)
70+
n_rows = (1 if n_ff else 0) + (1 if n_density else 0)
7471

7572
if n_rows == 0:
7673
fig, ax = plt.subplots(figsize=(8, 4))
@@ -95,8 +92,6 @@ def main():
9592
row_specs = []
9693
if n_ff:
9794
row_specs.append(("Field Fraction (%)", ff_cols, "steelblue"))
98-
if n_count:
99-
row_specs.append(("Count (objects)", count_cols, "darkorange"))
10095
if n_density:
10196
row_specs.append(("Density (objects/vol)", density_cols, "forestgreen"))
10297

spimquant/workflow/scripts/qc_segmentation_overview.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,8 +113,8 @@ def main():
113113
for col, sl in enumerate(slice_indices):
114114
idx = [slice(None)] * 3
115115
idx[ax_idx] = int(sl)
116-
spim_sl = np.rot90(spim_norm[tuple(idx)])
117-
mask_sl = np.rot90(mask_norm[tuple(idx)])
116+
spim_sl = spim_norm[tuple(idx)]
117+
mask_sl = mask_norm[tuple(idx)]
118118

119119
ax = axes[row, col]
120120
ax.imshow(spim_sl, cmap="gray", vmin=0, vmax=1, aspect=aspect)

spimquant/workflow/scripts/qc_segmentation_roi_zoom.py

Lines changed: 48 additions & 52 deletions
Original file line numberDiff line numberDiff line change
@@ -23,47 +23,23 @@
2323
from scipy.ndimage import zoom
2424

2525

26-
def _percentile_norm(arr, pct_low=1, pct_high=99):
27-
"""Percentile-normalise *arr* to the range [0, 1] using global statistics."""
26+
def _estimate_global_percentiles(
27+
arr,
28+
pct_low=1,
29+
pct_high=99,
30+
):
31+
"""
32+
Estimate percentile normalization bounds from an image
33+
"""
2834
lo = np.percentile(arr, pct_low)
2935
hi = np.percentile(arr, pct_high)
30-
if hi > lo:
31-
return np.clip((arr.astype(float) - lo) / (hi - lo), 0.0, 1.0)
32-
return np.zeros_like(arr, dtype=float)
33-
34-
35-
def _match_shape(source, target_shape, order=1):
36-
"""Zoom *source* array to *target_shape* if shapes differ."""
37-
if source.shape == target_shape:
38-
return source
39-
factors = [t / s for t, s in zip(target_shape, source.shape)]
40-
return zoom(source, factors, order=order)
41-
36+
return float(lo), float(hi)
4237

43-
def _select_best_z_slice(ff_crop):
44-
"""Return the Z-index with the most field-fraction signal.
4538

46-
Falls back to the central slice when no signal is present.
47-
"""
48-
ff_per_z = ff_crop.sum(axis=(0, 1))
49-
if ff_per_z.max() > 0:
50-
return int(ff_per_z.argmax())
51-
return ff_crop.shape[2] // 2
52-
53-
54-
def _get_bounding_box(mask, pad=5):
55-
"""Return index slices for the bounding box of a boolean *mask* with padding.
56-
57-
Returns ``None`` when the mask is empty.
58-
"""
59-
indices = np.where(mask)
60-
if not indices[0].size:
61-
return None
62-
shape = mask.shape
63-
return tuple(
64-
slice(max(0, int(idx.min()) - pad), min(sz, int(idx.max()) + pad + 1))
65-
for idx, sz in zip(indices, shape)
66-
)
39+
def _apply_fixed_percentile_norm(arr, lo, hi):
40+
if hi > lo:
41+
return np.clip((arr.astype(np.float32) - lo) / (hi - lo), 0.0, 1.0)
42+
return np.zeros_like(arr, dtype=np.float32)
6743

6844

6945
def main():
@@ -73,20 +49,33 @@ def main():
7349
max_rois = snakemake.params.max_rois
7450
n_cols = snakemake.params.n_cols
7551

76-
spim_img = ZarrNii.from_ome_zarr(snakemake.input.spim,level=snakemake.params.level, downsample_near_isotropic=True,channel_labels=[snakemake.wildcards.stain])
77-
mask_img = ZarrNii.from_ome_zarr(snakemake.input.mask,level=0)
78-
79-
atlas = ZarrNiiAtlas.from_files(snakemake.input.dseg_nii,snakemake.input.label_tsv)
52+
spim_img = ZarrNii.from_ome_zarr(
53+
snakemake.input.spim,
54+
level=snakemake.params.level,
55+
downsample_near_isotropic=True,
56+
channel_labels=[snakemake.wildcards.stain],
57+
)
58+
mask_img = ZarrNii.from_ome_zarr(snakemake.input.mask, level=0)
8059

81-
dseg_data = atlas.dseg.data.compute()
60+
atlas = ZarrNiiAtlas.from_files(snakemake.input.dseg_nii, snakemake.input.label_tsv)
8261

62+
dseg_data = atlas.dseg.data.compute()
8363

8464
# Voxel dimensions (mm) for physical aspect-ratio correction - not implemented yet
85-
# but should be easy with ZarrNii image .scale
65+
# but should be easy with ZarrNii image .scale
8666
aspect_axial = 1
8767

68+
spim_img_ds = ZarrNii.from_ome_zarr(
69+
snakemake.input.spim,
70+
level=(int(snakemake.params.level) + 5),
71+
downsample_near_isotropic=True,
72+
channel_labels=[snakemake.wildcards.stain],
73+
)
8874

89-
# Global normalisations - not implemented yet
75+
# estimate once globally, from a coarse version of the full image
76+
glob_lo, glob_hi = _estimate_global_percentiles(
77+
spim_img_ds.data.compute(), pct_low=1, pct_high=99
78+
)
9079

9180
# Load atlas label table
9281
label_df = atlas.labels_df
@@ -145,20 +134,27 @@ def main():
145134
label_id = int(row["index"])
146135
label_name = str(row.get("name", label_id))
147136

148-
149-
#get cropped images for this label
137+
# get cropped images for this label
150138
bbox_min, bbox_max = atlas.get_region_bounding_box(region_ids=label_id)
151139
center_coord = tuple((x + y) / 2 for x, y in zip(bbox_min, bbox_max))
152-
spim_crop = spim_img.crop_centered(center_coord, patch_size=(2000,2000,1))
153-
mask_crop = mask_img.crop_centered(center_coord, patch_size=(2000,2000,1))
154-
140+
spim_crop = spim_img.crop_centered(
141+
center_coord,
142+
patch_size=(snakemake.params.patch_size, snakemake.params.patch_size, 1),
143+
)
144+
mask_crop = mask_img.crop_centered(
145+
center_coord,
146+
patch_size=(snakemake.params.patch_size, snakemake.params.patch_size, 1),
147+
)
155148

156-
spim_sl = np.rot90(spim_crop.data[0, :, :].squeeze().compute())
157-
mask_sl = np.rot90(mask_crop.data[0, :, :].squeeze().compute())
149+
spim_sl = spim_crop.data[0, :, :].squeeze().compute()
150+
spim_sl = _apply_fixed_percentile_norm(spim_sl, glob_lo, glob_hi)
151+
mask_sl = mask_crop.data[0, :, :].squeeze().compute()
158152

159153
ax.imshow(spim_sl, cmap="gray")
160154
mask_masked = np.ma.masked_where(mask_sl < 100, mask_sl)
161-
ax.imshow(mask_masked, cmap="spring", alpha=0.6, vmin=0, vmax=100, aspect=aspect_axial)
155+
ax.imshow(
156+
mask_masked, cmap="spring", alpha=0.6, vmin=0, vmax=100, aspect=aspect_axial
157+
)
162158
ax.set_title(label_name, fontsize=7, pad=2)
163159
ax.set_xticks([])
164160
ax.set_yticks([])

0 commit comments

Comments
 (0)