Skip to content

Commit 7abd745

Browse files
authored
Merge pull request #50 from AymericGaudin/scaling
Corrections to tools/scaling
2 parents 56296c4 + 34930a3 commit 7abd745

3 files changed

Lines changed: 21 additions & 15 deletions

File tree

CHANGELOG.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,9 @@ NEW
3636
---
3737

3838
- :bdg-success:`Doc` Create doc with `furo <https://github.com/pradyunsg/furo>`_.
39-
- :bdg-success:`Enhancement` Worflows generate a report file.
39+
- :bdg-success:`Enhancement` Workflows generate a report file.
4040
- :bdg-success:`Enhancement` Anonymize workflow outputs.
41-
- :bdg-success:`Enhancement` Worflows generate BIDS-compliant organization.
41+
- :bdg-success:`Enhancement` Workflows generate BIDS-compliant organization.
4242
- :bdg-success:`Enhancement` New workflows can generate HTML reporting.
4343
- :bdg-success:`Datasets` Toy datasets have been added to test the module.
4444
- :bdg-success:`Enhancement` Quasi-RAW preprocessing compute the brain mask

tools/build/build_tests.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -172,8 +172,8 @@ def main(
172172
image_file = str(image_template).format(
173173
workflow=name.replace("plot_", ""),
174174
)
175-
worflow_type = step_commands[0][1].split("-")[0]
176-
selected_conf = confs.get(worflow_type, confs["default"])
175+
workflow_type = step_commands[0][1].split("-")[0]
176+
selected_conf = confs.get(workflow_type, confs["default"])
177177
if selected_conf.get("freesurfer", False):
178178
if infra == "ccc":
179179
image_parameters_ += (

tools/scaling/scaling_cli.py

Lines changed: 17 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
"""
1111

1212
import hashlib
13+
import os
1314
import re
1415
import tomllib
1516
from datetime import date, datetime
@@ -261,14 +262,18 @@ def organize_bids_tab(
261262
print(banner)
262263

263264
df = pd.read_csv(tab_file, sep="\t", dtype=str)
265+
if isinstance(tab_file, Path):
266+
rawdata_path = str(tab_file.parent)
267+
elif isinstance(tab_file, str):
268+
rawdata_path = os.path.dirname(tab_file)
264269

265270
record = {}
266271
for _, row in df.iterrows():
267272
modality = row["submod"]
268273
row = {
269274
"subject": row["sub"],
270275
"session": row["ses"],
271-
modality: row["path"],
276+
modality: row["path"].replace("./", f"{rawdata_path}/"),
272277
f"{modality}_md5_hash": (
273278
row["md5sum"] if with_hash else None
274279
),
@@ -364,7 +369,7 @@ def collect_config(
364369
config_file: str | Path,
365370
dfs: dict[str, pd.DataFrame],
366371
long_dfs: dict[str, pd.DataFrame],
367-
worflow_id: str,
372+
workflow_id: str,
368373
workflow_parameters: str,
369374
workflow_resource: dict,
370375
image_dir: str | Path,
@@ -392,8 +397,8 @@ def collect_config(
392397
One DataFrame per modality, with one row per subject/session.
393398
If multiple files exist for a modality, they are expanded into
394399
columns named "<modality>-1", "<modality>-2".
395-
worflow_id : str
396-
The workflow dcalred name in brainprep CLI.
400+
workflow_id : str
401+
The workflow declared name in brainprep CLI.
397402
workflow_parameters : str
398403
A command-line template containing placeholders like {T1w}.
399404
workflow_resource : dict
@@ -418,10 +423,10 @@ def collect_config(
418423
"""
419424
print(banner)
420425

421-
workflow_name = worflow_id.split("-")[-1]
426+
workflow_name = workflow_id.split("-")[-1]
422427
if workflow_name == "qa":
423428
workflow_name = "quality_assurance"
424-
workflow_type = worflow_id.split("-")[0]
429+
workflow_type = workflow_id.split("-")[0]
425430
print(f"- name: {workflow_name}")
426431
print(f"- type: {workflow_type}")
427432
print(f"- parameters: {workflow_parameters}")
@@ -486,6 +491,7 @@ def collect_config(
486491
if infra == "slurm":
487492
image_parameters = (
488493
f"--cleanenv --home {home_dir} --bind {bind_dir} "
494+
f"--bind {output_dir} "
489495
)
490496
else:
491497
image_parameters = ""
@@ -513,7 +519,7 @@ def collect_config(
513519
name=workflow_name,
514520
operator="TO UPDATE",
515521
date=str(datetime.now().date()),
516-
commands=f'"brainprep {workflow_name} {workflow_parameters}"',
522+
commands=f'"brainprep {workflow_id} {workflow_parameters}"',
517523
parameters=image_parameters,
518524
cluster=infra,
519525
partition=partition,
@@ -656,17 +662,17 @@ def scan_configs(
656662
f"Unknown workflow IDs: {', '.join(sorted(unknown))}. "
657663
f"Valid workflows are: {', '.join(sorted(known_workflows))}"
658664
)
659-
for worflow_id, workflow_parameters in workflows.items():
660-
if worflow_id not in allowed_workflows:
661-
print(f"\n-- skip: {worflow_id} --")
665+
for workflow_id, workflow_parameters in workflows.items():
666+
if workflow_id not in allowed_workflows:
667+
print(f"\n-- skip: {workflow_id} --")
662668
continue
663669
collect_config(
664670
infra,
665671
root.parent,
666672
config_file,
667673
dfs,
668674
long_dfs,
669-
worflow_id,
675+
workflow_id,
670676
workflow_parameters,
671677
workflow_resource,
672678
image_dir,

0 commit comments

Comments
 (0)