Skip to content

Commit f8371f2

Browse files
authored
Reorganize tests for readability (#47)
1 parent d2eae5d commit f8371f2

9 files changed

Lines changed: 1129 additions & 1157 deletions

test/test_cifti_cli.py

Lines changed: 1021 additions & 143 deletions
Large diffs are not rendered by default.

test/test_cifti_cli_comprehensive.py

Lines changed: 0 additions & 926 deletions
This file was deleted.

test/test_cifti_utils.py

Lines changed: 30 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,47 +1,13 @@
1-
"""Unit tests for CIFTI validation helpers."""
1+
"""Unit tests for CIFTI utility helpers."""
22

33
from __future__ import annotations
44

5-
import nibabel as nb
65
import numpy as np
76
import pytest
8-
from nibabel.cifti2.cifti2_axes import BrainModelAxis, ParcelsAxis, ScalarAxis
7+
from nibabel.cifti2.cifti2_axes import ScalarAxis
8+
from utils import make_dscalar, make_parcels_axis, make_pconn, make_pscalar
99

10-
from modelarrayio.utils.cifti import extract_cifti_scalar_data
11-
12-
13-
def _make_scalar_cifti(mask_bool: np.ndarray, values: np.ndarray) -> nb.Cifti2Image:
14-
scalar_axis = ScalarAxis(['synthetic'])
15-
brain_axis = BrainModelAxis.from_mask(mask_bool)
16-
header = nb.cifti2.Cifti2Header.from_axes((scalar_axis, brain_axis))
17-
return nb.Cifti2Image(values.reshape(1, -1).astype(np.float32), header=header)
18-
19-
20-
def _make_parcels_axis(parcel_names: list[str]) -> ParcelsAxis:
21-
"""Create a minimal surface-only ParcelsAxis for testing."""
22-
# One vertex per parcel on the left cortex
23-
n = len(parcel_names)
24-
nvertices = {'CIFTI_STRUCTURE_CORTEX_LEFT': n}
25-
vox_dtype = np.dtype([('ijk', '<i4', (3,))])
26-
voxels = [np.array([], dtype=vox_dtype) for _ in range(n)]
27-
vertices = [{'CIFTI_STRUCTURE_CORTEX_LEFT': np.array([i], dtype=np.int32)} for i in range(n)]
28-
affine = np.eye(4)
29-
volume_shape = (10, 10, 10)
30-
return ParcelsAxis(parcel_names, voxels, vertices, affine, volume_shape, nvertices)
31-
32-
33-
def _make_pscalar_cifti(parcel_names: list[str], values: np.ndarray) -> nb.Cifti2Image:
34-
scalar_axis = ScalarAxis(['synthetic'])
35-
parcels_axis = _make_parcels_axis(parcel_names)
36-
header = nb.cifti2.Cifti2Header.from_axes((scalar_axis, parcels_axis))
37-
return nb.Cifti2Image(values.reshape(1, -1).astype(np.float32), header=header)
38-
39-
40-
def _make_pconn_cifti(parcel_names: list[str], values: np.ndarray) -> nb.Cifti2Image:
41-
parcels_axis = _make_parcels_axis(parcel_names)
42-
header = nb.cifti2.Cifti2Header.from_axes((parcels_axis, parcels_axis))
43-
n = len(parcel_names)
44-
return nb.Cifti2Image(values.reshape(n, n).astype(np.float32), header=header)
10+
from modelarrayio.utils.cifti import brain_names_to_dataframe, extract_cifti_scalar_data
4511

4612

4713
class _FakeHeader:
@@ -66,7 +32,7 @@ def test_extract_cifti_scalar_data_returns_data_and_names() -> None:
6632
mask = np.zeros((2, 2, 2), dtype=bool)
6733
mask[0, 0, 0] = True
6834
mask[1, 1, 1] = True
69-
image = _make_scalar_cifti(mask, np.array([1.0, 2.0], dtype=np.float32))
35+
image = make_dscalar(mask, np.array([1.0, 2.0], dtype=np.float32))
7036

7137
data, names = extract_cifti_scalar_data(image)
7238

@@ -95,7 +61,7 @@ def test_extract_cifti_scalar_data_rejects_inconsistent_reference_names() -> Non
9561
mask = np.zeros((2, 2, 2), dtype=bool)
9662
mask[0, 0, 0] = True
9763
mask[1, 1, 1] = True
98-
image = _make_scalar_cifti(mask, np.array([1.0, 2.0], dtype=np.float32))
64+
image = make_dscalar(mask, np.array([1.0, 2.0], dtype=np.float32))
9965

10066
with pytest.raises(ValueError, match='Inconsistent greyordinate names'):
10167
extract_cifti_scalar_data(image, reference_brain_names=np.array(['wrong', 'names']))
@@ -104,7 +70,7 @@ def test_extract_cifti_scalar_data_rejects_inconsistent_reference_names() -> Non
10470
def test_extract_cifti_scalar_data_pscalar_returns_data_and_names() -> None:
10571
parcel_names = ['parcel_A', 'parcel_B', 'parcel_C']
10672
values = np.array([1.0, 2.0, 3.0], dtype=np.float32)
107-
image = _make_pscalar_cifti(parcel_names, values)
73+
image = make_pscalar(parcel_names, values)
10874

10975
data, names = extract_cifti_scalar_data(image)
11076

@@ -115,7 +81,7 @@ def test_extract_cifti_scalar_data_pscalar_returns_data_and_names() -> None:
11581
def test_extract_cifti_scalar_data_pscalar_validates_reference_names() -> None:
11682
parcel_names = ['parcel_A', 'parcel_B', 'parcel_C']
11783
values = np.array([1.0, 2.0, 3.0], dtype=np.float32)
118-
image = _make_pscalar_cifti(parcel_names, values)
84+
image = make_pscalar(parcel_names, values)
11985

12086
with pytest.raises(ValueError, match='Inconsistent parcel names'):
12187
extract_cifti_scalar_data(image, reference_brain_names=np.array(['X', 'Y', 'Z']))
@@ -125,7 +91,7 @@ def test_extract_cifti_scalar_data_pconn_flattens_matrix() -> None:
12591
parcel_names = ['parcel_A', 'parcel_B']
12692
n = len(parcel_names)
12793
matrix = np.arange(n * n, dtype=np.float32).reshape(n, n)
128-
image = _make_pconn_cifti(parcel_names, matrix)
94+
image = make_pconn(parcel_names, matrix)
12995

13096
data, names = extract_cifti_scalar_data(image)
13197

@@ -140,7 +106,7 @@ def test_extract_cifti_scalar_data_pconn_validates_reference_names() -> None:
140106
parcel_names = ['parcel_A', 'parcel_B']
141107
n = len(parcel_names)
142108
matrix = np.zeros((n, n), dtype=np.float32)
143-
image = _make_pconn_cifti(parcel_names, matrix)
109+
image = make_pconn(parcel_names, matrix)
144110

145111
# Get the correct element_names first
146112
_, element_names = extract_cifti_scalar_data(image)
@@ -150,3 +116,23 @@ def test_extract_cifti_scalar_data_pconn_validates_reference_names() -> None:
150116
bad_names[0] = 'wrong'
151117
with pytest.raises(ValueError, match='Inconsistent parcel names'):
152118
extract_cifti_scalar_data(image, reference_brain_names=bad_names)
119+
120+
121+
def test_make_parcels_axis_produces_valid_axis() -> None:
    """Smoke test: make_parcels_axis returns a ParcelsAxis of the right length."""
    from nibabel.cifti2.cifti2_axes import ParcelsAxis

    parcel_names = ['A', 'B', 'C']
    result = make_parcels_axis(parcel_names)
    assert isinstance(result, ParcelsAxis)
    assert len(result) == len(parcel_names)
129+
130+
131+
def test_brain_names_to_dataframe() -> None:
    """brain_names_to_dataframe should index vertices and factorize structures."""
    brain_names = np.array(['CORTEX_LEFT', 'CORTEX_LEFT', 'CORTEX_RIGHT'])
    frame, structure_strings = brain_names_to_dataframe(brain_names)
    assert len(frame) == 3
    assert 'vertex_id' in frame.columns
    assert 'structure_id' in frame.columns
    assert frame['vertex_id'].tolist() == [0, 1, 2]
    # Two distinct structure names in the input -> two factorized labels.
    assert len(structure_strings) == 2

test/test_mif_cli.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
"""Tests for the mif-to-h5 CLI command."""
2+
3+
from __future__ import annotations
4+
15
import csv
26

37
import h5py
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Unit tests for fixel utility error handling."""
1+
"""Unit tests for MIF/fixel utility helpers."""
22

33
from __future__ import annotations
44

Lines changed: 1 addition & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,10 @@
1-
"""Tests for CIFTI cohort normalization and greyordinate helpers."""
1+
"""Unit tests for modelarrayio.utils.misc cohort and scalar-source helpers."""
22

33
from __future__ import annotations
44

5-
import numpy as np
65
import pandas as pd
76
import pytest
87

9-
from modelarrayio.utils.cifti import brain_names_to_dataframe
108
from modelarrayio.utils.misc import build_scalar_sources, cohort_to_long_dataframe
119

1210

@@ -74,13 +72,3 @@ def test_build_scalar_sources_ordering() -> None:
7472
assert list(src.keys()) == ['A', 'B']
7573
assert src['A'] == ['x1', 'x2']
7674
assert src['B'] == ['y1']
77-
78-
79-
def test_brain_names_to_dataframe() -> None:
80-
names = np.array(['CORTEX_LEFT', 'CORTEX_LEFT', 'CORTEX_RIGHT'])
81-
gdf, struct_strings = brain_names_to_dataframe(names)
82-
assert len(gdf) == 3
83-
assert 'vertex_id' in gdf.columns
84-
assert 'structure_id' in gdf.columns
85-
assert gdf['vertex_id'].tolist() == [0, 1, 2]
86-
assert len(struct_strings) == 2 # factorize unique structures

test/test_voxels_cli.py

Lines changed: 20 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,26 @@
1+
"""Tests for the nifti-to-h5 and h5-to-nifti CLI commands."""
2+
3+
from __future__ import annotations
4+
15
import csv
6+
import logging
27
import os.path as op
38

49
import h5py
510
import nibabel as nb
611
import numpy as np
712
import pytest
813
import tiledb
14+
from utils import make_nifti
915

1016
from modelarrayio.cli.main import main as modelarrayio_main
1117

1218

13-
def _make_nifti(data, affine=None):
14-
if affine is None:
15-
affine = np.eye(4)
16-
return nb.Nifti1Image(data.astype(np.float32), affine)
17-
18-
1919
def _ijk_value(i, j, k):
2020
return i * 100.0 + j * 10.0 + k * 1.0
2121

2222

23-
def test_convoxel_cli_creates_expected_hdf5(tmp_path, monkeypatch):
23+
def test_nifti_to_h5_creates_expected_hdf5(tmp_path, monkeypatch):
2424
# Small synthetic volume
2525
shape = (5, 6, 7)
2626
group_mask = np.zeros(shape, dtype=bool)
@@ -30,7 +30,7 @@ def test_convoxel_cli_creates_expected_hdf5(tmp_path, monkeypatch):
3030
group_mask[i, j, k] = True
3131

3232
# Save group mask
33-
group_mask_img = _make_nifti(group_mask.astype(np.uint8))
33+
group_mask_img = make_nifti(group_mask.astype(np.uint8))
3434
group_mask_file = tmp_path / 'group_mask.nii.gz'
3535
group_mask_img.to_filename(group_mask_file)
3636

@@ -48,8 +48,8 @@ def test_convoxel_cli_creates_expected_hdf5(tmp_path, monkeypatch):
4848
omit = true_coords[1]
4949
indiv_mask[omit] = False
5050

51-
scalar_img = _make_nifti(scalar)
52-
mask_img = _make_nifti(indiv_mask.astype(np.uint8))
51+
scalar_img = make_nifti(scalar)
52+
mask_img = make_nifti(indiv_mask.astype(np.uint8))
5353

5454
scalar_path = tmp_path / f'sub-{sidx + 1}_scalar.nii.gz'
5555
mask_path = tmp_path / f'sub-{sidx + 1}_mask.nii.gz'
@@ -144,15 +144,15 @@ def test_convoxel_cli_creates_expected_hdf5(tmp_path, monkeypatch):
144144
assert np.isclose(v1, expected_s1, equal_nan=True)
145145

146146

147-
def test_h5_to_nifti_cli_writes_results_with_dataset_column_names(tmp_path):
147+
def test_h5_to_nifti_writes_results_with_dataset_column_names(tmp_path):
148148
shape = (3, 3, 3)
149149
group_mask = np.zeros(shape, dtype=bool)
150150
true_coords = [(0, 0, 0), (1, 1, 1), (2, 2, 2)]
151151
for coord in true_coords:
152152
group_mask[coord] = True
153153

154154
group_mask_file = tmp_path / 'group_mask.nii.gz'
155-
_make_nifti(group_mask.astype(np.uint8)).to_filename(group_mask_file)
155+
make_nifti(group_mask.astype(np.uint8)).to_filename(group_mask_file)
156156

157157
in_file = tmp_path / 'results.h5'
158158
with h5py.File(in_file, 'w') as h5:
@@ -215,12 +215,12 @@ def test_nifti_to_h5_scalar_columns_writes_prefixed_outputs(tmp_path, monkeypatc
215215
group_mask[i, j, k] = True
216216

217217
group_mask_file = tmp_path / 'group_mask.nii.gz'
218-
_make_nifti(group_mask.astype(np.uint8)).to_filename(group_mask_file)
218+
make_nifti(group_mask.astype(np.uint8)).to_filename(group_mask_file)
219219

220220
rows = []
221221
for sidx in range(2):
222222
subj_mask_file = tmp_path / f'sub-{sidx + 1}_mask.nii.gz'
223-
_make_nifti(group_mask.astype(np.uint8)).to_filename(subj_mask_file)
223+
make_nifti(group_mask.astype(np.uint8)).to_filename(subj_mask_file)
224224

225225
alpha_data = np.zeros(shape, dtype=np.float32)
226226
beta_data = np.zeros(shape, dtype=np.float32)
@@ -230,8 +230,8 @@ def test_nifti_to_h5_scalar_columns_writes_prefixed_outputs(tmp_path, monkeypatc
230230

231231
alpha_file = tmp_path / f'sub-{sidx + 1}_alpha.nii.gz'
232232
beta_file = tmp_path / f'sub-{sidx + 1}_beta.nii.gz'
233-
_make_nifti(alpha_data).to_filename(alpha_file)
234-
_make_nifti(beta_data).to_filename(beta_file)
233+
make_nifti(alpha_data).to_filename(alpha_file)
234+
make_nifti(beta_data).to_filename(beta_file)
235235

236236
rows.append(
237237
{
@@ -293,7 +293,7 @@ def _build_nifti_cohort(tmp_path):
293293
for coord in true_coords:
294294
group_mask[coord] = 1
295295
group_mask_file = tmp_path / 'group_mask.nii.gz'
296-
_make_nifti(group_mask).to_filename(group_mask_file)
296+
make_nifti(group_mask).to_filename(group_mask_file)
297297

298298
rows = []
299299
for sidx in range(2):
@@ -302,8 +302,8 @@ def _build_nifti_cohort(tmp_path):
302302
scalar[i, j, k] = float(i + j + k + sidx)
303303
scalar_file = tmp_path / f'sub-{sidx + 1}_scalar.nii.gz'
304304
mask_file = tmp_path / f'sub-{sidx + 1}_mask.nii.gz'
305-
_make_nifti(scalar).to_filename(scalar_file)
306-
_make_nifti(group_mask).to_filename(mask_file)
305+
make_nifti(scalar).to_filename(scalar_file)
306+
make_nifti(group_mask).to_filename(mask_file)
307307
rows.append(
308308
{
309309
'scalar_name': 'FA',
@@ -321,15 +321,13 @@ def _build_nifti_cohort(tmp_path):
321321
return group_mask_file, cohort_csv
322322

323323

324-
def test_nifti_tiledb_fails_when_output_already_exists(tmp_path, monkeypatch, caplog):
324+
def test_nifti_tiledb_removes_existing_arrays_on_rerun(tmp_path, monkeypatch, caplog):
325325
"""Regression test for https://github.com/PennLINC/ModelArrayIO/issues/39.
326326
327327
The TileDB backend should succeed when the output directory already contains
328328
arrays from a previous run, removing and recreating them, and should emit a
329329
warning for each removed array.
330330
"""
331-
import logging
332-
333331
group_mask_file, cohort_csv = _build_nifti_cohort(tmp_path)
334332
out_tdb = tmp_path / 'out.tdb'
335333
monkeypatch.chdir(tmp_path)

test/test_voxels_utils.py

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,6 @@
99
from modelarrayio.utils.nifti import flattened_image
1010

1111

12-
def _eye_affine():
13-
return np.eye(4)
14-
15-
1612
def test_flattened_image_extracts_group_masked_values() -> None:
1713
shape = (3, 3, 3)
1814
group_mask = np.zeros(shape, dtype=bool)
@@ -24,8 +20,8 @@ def test_flattened_image_extracts_group_masked_values() -> None:
2420
scalar[2, 0, 1] = 2.5
2521

2622
indiv_mask = group_mask.copy()
27-
scalar_img = nb.Nifti1Image(scalar, _eye_affine())
28-
mask_img = nb.Nifti1Image(indiv_mask.astype(np.float32), _eye_affine())
23+
scalar_img = nb.Nifti1Image(scalar, np.eye(4))
24+
mask_img = nb.Nifti1Image(indiv_mask.astype(np.float32), np.eye(4))
2925

3026
flat = flattened_image(scalar_img, mask_img, group_mask)
3127
assert flat.shape == (2,)
@@ -43,8 +39,8 @@ def test_flattened_image_nan_outside_individual_mask() -> None:
4339
indiv = group_mask.copy()
4440
indiv[1, 1, 1] = False
4541

46-
scalar_img = nb.Nifti1Image(scalar, _eye_affine())
47-
mask_img = nb.Nifti1Image(indiv.astype(np.float32), _eye_affine())
42+
scalar_img = nb.Nifti1Image(scalar, np.eye(4))
43+
mask_img = nb.Nifti1Image(indiv.astype(np.float32), np.eye(4))
4844

4945
flat = flattened_image(scalar_img, mask_img, group_mask)
5046
assert flat.shape == (2,)

test/utils.py

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
"""Shared test utility functions for ModelArrayIO tests."""
2+
3+
from __future__ import annotations
4+
5+
import nibabel as nb
6+
import numpy as np
7+
from nibabel.cifti2.cifti2_axes import BrainModelAxis, ParcelsAxis, ScalarAxis
8+
9+
10+
def make_parcels_axis(parcel_names: list[str]) -> ParcelsAxis:
    """Build a minimal surface-only ParcelsAxis for tests.

    Each parcel gets exactly one left-cortex vertex and an empty voxel
    array, which is the smallest structure nibabel will accept.
    """
    num_parcels = len(parcel_names)
    empty_voxel_dtype = np.dtype([('ijk', '<i4', (3,))])
    voxel_blocks = [np.array([], dtype=empty_voxel_dtype) for _ in range(num_parcels)]
    vertex_blocks = [
        {'CIFTI_STRUCTURE_CORTEX_LEFT': np.array([idx], dtype=np.int32)}
        for idx in range(num_parcels)
    ]
    return ParcelsAxis(
        parcel_names,
        voxel_blocks,
        vertex_blocks,
        np.eye(4),
        (10, 10, 10),
        {'CIFTI_STRUCTURE_CORTEX_LEFT': num_parcels},
    )
18+
19+
20+
def make_dscalar(mask_bool: np.ndarray, values: np.ndarray) -> nb.Cifti2Image:
    """Build a synthetic dscalar CIFTI image.

    The brain-model axis is derived from the volumetric boolean mask;
    ``values`` supplies one float32 value per masked greyordinate on a
    single scalar map named 'synthetic'.
    """
    axes = (ScalarAxis(['synthetic']), BrainModelAxis.from_mask(mask_bool))
    header = nb.cifti2.Cifti2Header.from_axes(axes)
    data = values.astype(np.float32).reshape(1, -1)
    return nb.Cifti2Image(data, header=header)
26+
27+
28+
def make_pscalar(parcel_names: list[str], values: np.ndarray) -> nb.Cifti2Image:
    """Build a synthetic pscalar CIFTI image.

    One scalar map named 'synthetic' over a minimal parcels axis built
    from ``parcel_names``; ``values`` supplies one float32 value per parcel.
    """
    axes = (ScalarAxis(['synthetic']), make_parcels_axis(parcel_names))
    header = nb.cifti2.Cifti2Header.from_axes(axes)
    data = values.astype(np.float32).reshape(1, -1)
    return nb.Cifti2Image(data, header=header)
34+
35+
36+
def make_pconn(parcel_names: list[str], values: np.ndarray) -> nb.Cifti2Image:
    """Build a synthetic pconn CIFTI image.

    Both axes are the same minimal parcels axis; ``values`` is reshaped
    to an (n_parcels, n_parcels) float32 connectivity matrix.
    """
    axis = make_parcels_axis(parcel_names)
    header = nb.cifti2.Cifti2Header.from_axes((axis, axis))
    size = len(parcel_names)
    matrix = values.astype(np.float32).reshape(size, size)
    return nb.Cifti2Image(matrix, header=header)
42+
43+
44+
def make_nifti(
    data: np.ndarray,
    affine: np.ndarray | None = None,
    *,
    dtype: np.dtype | type = np.float32,
) -> nb.Nifti1Image:
    """Create a Nifti1Image with the given data and an optional affine.

    Parameters
    ----------
    data:
        Array data for the image; cast to ``dtype`` before wrapping.
    affine:
        Voxel-to-world affine; defaults to the 4x4 identity when None.
    dtype:
        Target dtype for the stored data (keyword-only). Defaults to
        ``np.float32`` for backward compatibility; pass e.g. ``np.uint8``
        to keep integer mask data unconverted (previously the float32
        cast was hard-coded and silently overrode callers' integer casts).
    """
    if affine is None:
        affine = np.eye(4)
    return nb.Nifti1Image(data.astype(dtype), affine)

0 commit comments

Comments
 (0)