Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,9 @@ NEW
---

- :bdg-success:`Doc` Create doc with `furo <https://github.com/pradyunsg/furo>`_.
- :bdg-success:`Enhancement` Worflows generate a report file.
- :bdg-success:`Enhancement` Workflows generate a report file.
- :bdg-success:`Enhancement` Anonymize workflow outputs.
- :bdg-success:`Enhancement` Worflows generate BIDS-compliant organization.
- :bdg-success:`Enhancement` Workflows generate BIDS-compliant organization.
- :bdg-success:`Enhancement` New workflows can generate HTML reporting.
- :bdg-success:`Datasets` Toy datasets have been added to test the module.
- :bdg-success:`Enhancement` Quasi-RAW preprocessing computes the brain mask
Expand Down
4 changes: 2 additions & 2 deletions tools/build/build_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,8 +172,8 @@ def main(
image_file = str(image_template).format(
workflow=name.replace("plot_", ""),
)
worflow_type = step_commands[0][1].split("-")[0]
selected_conf = confs.get(worflow_type, confs["default"])
workflow_type = step_commands[0][1].split("-")[0]
selected_conf = confs.get(workflow_type, confs["default"])
if selected_conf.get("freesurfer", False):
if infra == "ccc":
image_parameters_ += (
Expand Down
28 changes: 17 additions & 11 deletions tools/scaling/scaling_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
"""

import hashlib
import os
import re
import tomllib
from datetime import date, datetime
Expand Down Expand Up @@ -261,14 +262,18 @@ def organize_bids_tab(
print(banner)

df = pd.read_csv(tab_file, sep="\t", dtype=str)
if isinstance(tab_file, Path):
rawdata_path = str(tab_file.parent)
elif isinstance(tab_file, str):
rawdata_path = os.path.dirname(tab_file)

record = {}
for _, row in df.iterrows():
modality = row["submod"]
row = {
"subject": row["sub"],
"session": row["ses"],
modality: row["path"],
modality: row["path"].replace("./", f"{rawdata_path}/"),
f"{modality}_md5_hash": (
row["md5sum"] if with_hash else None
),
Expand Down Expand Up @@ -364,7 +369,7 @@ def collect_config(
config_file: str | Path,
dfs: dict[str, pd.DataFrame],
long_dfs: dict[str, pd.DataFrame],
worflow_id: str,
workflow_id: str,
workflow_parameters: str,
workflow_resource: dict,
image_dir: str | Path,
Expand Down Expand Up @@ -392,8 +397,8 @@ def collect_config(
One DataFrame per modality, with one row per subject/session.
If multiple files exist for a modality, they are expanded into
columns named "<modality>-1", "<modality>-2".
worflow_id : str
The workflow dcalred name in brainprep CLI.
workflow_id : str
The workflow's declared name in the brainprep CLI.
workflow_parameters : str
A command-line template containing placeholders like {T1w}.
workflow_resource : dict
Expand All @@ -418,10 +423,10 @@ def collect_config(
"""
print(banner)

workflow_name = worflow_id.split("-")[-1]
workflow_name = workflow_id.split("-")[-1]
if workflow_name == "qa":
workflow_name = "quality_assurance"
workflow_type = worflow_id.split("-")[0]
workflow_type = workflow_id.split("-")[0]
print(f"- name: {workflow_name}")
print(f"- type: {workflow_type}")
print(f"- parameters: {workflow_parameters}")
Expand Down Expand Up @@ -486,6 +491,7 @@ def collect_config(
if infra == "slurm":
image_parameters = (
f"--cleanenv --home {home_dir} --bind {bind_dir} "
f"--bind {output_dir} "
)
else:
image_parameters = ""
Expand Down Expand Up @@ -513,7 +519,7 @@ def collect_config(
name=workflow_name,
operator="TO UPDATE",
date=str(datetime.now().date()),
commands=f'"brainprep {workflow_name} {workflow_parameters}"',
commands=f'"brainprep {workflow_id} {workflow_parameters}"',
parameters=image_parameters,
cluster=infra,
partition=partition,
Expand Down Expand Up @@ -656,17 +662,17 @@ def scan_configs(
f"Unknown workflow IDs: {', '.join(sorted(unknown))}. "
f"Valid workflows are: {', '.join(sorted(known_workflows))}"
)
for worflow_id, workflow_parameters in workflows.items():
if worflow_id not in allowed_workflows:
print(f"\n-- skip: {worflow_id} --")
for workflow_id, workflow_parameters in workflows.items():
if workflow_id not in allowed_workflows:
print(f"\n-- skip: {workflow_id} --")
continue
collect_config(
infra,
root.parent,
config_file,
dfs,
long_dfs,
worflow_id,
workflow_id,
workflow_parameters,
workflow_resource,
image_dir,
Expand Down
Loading