From d29115d7039837ff65f5fd3c6761d0f4800e20ec Mon Sep 17 00:00:00 2001 From: Dominika Zemanovicova Date: Wed, 17 Jan 2024 23:17:18 +0100 Subject: [PATCH] Use matched_runs instead of bids_mappings --- bidscoin/bids.py | 113 ++--------- bidscoin/bidscoiner.py | 14 -- bidscoin/plugins/dcm2niix2bids.py | 21 +-- bidscoin/plugins/nibabel2bids.py | 16 +- bidscoin/plugins/spec2nii2bids.py | 16 +- tests/test_bids.py | 303 +----------------------------- 6 files changed, 35 insertions(+), 448 deletions(-) diff --git a/bidscoin/bids.py b/bidscoin/bids.py index 0e9f59a4..f4b1da82 100644 --- a/bidscoin/bids.py +++ b/bidscoin/bids.py @@ -313,32 +313,6 @@ def dynamicvalue(self, value, cleanup: bool=True, runtime: bool=False): return value -class BidsMapping: - """ - Represents a mapping of BIDS target files from source data. - :param source: Path to source data - :param targets: BIDS target files converted from source data - :param datatype: The BIDS data type of the data source and targets - :param run: Bidsmap run used for conversion - """ - def __init__(self, source: Path, targets: Set[Path], datatype: str, run: Dict): - """ - Initialize BidsMapping. 
- :param source: Path to source data - :param targets: BIDS target files converted from source data - :param datatype: The BIDS data type of the data source and targets - :param run: Bidsmap run used for conversion - """ - self.source = source - self.targets = targets - self.datatype = datatype - self.run = run - - def __repr__(self): - return (f"BidsMapping(source={self.source!r}, targets={self.targets!r}, " - f"datatype={self.datatype!r})") - - def unpack(sourcefolder: Path, wildcard: str='', workfolder: Path='') -> Tuple[List[Path], bool]: """ Unpacks and sorts DICOM files in sourcefolder to a temporary folder if sourcefolder contains a DICOMDIR file or .tar.gz, .gz or .zip files @@ -1398,7 +1372,8 @@ def get_run_(provenance: Union[str, Path]='', dataformat: str='', datatype: str= attributes = {}, bids = {}, meta = {}, - datasource = datasource) + datasource = datasource, + targets = set()) def get_run(bidsmap: dict, datatype: str, suffix_idx: Union[int, str], datasource: DataSource) -> dict: @@ -1514,7 +1489,7 @@ def delete_run(bidsmap: dict, provenance: Union[dict, str], datatype: str= '', d def append_run(bidsmap: dict, run: dict, clean: bool=True) -> None: """ - Append a run to the BIDS map + Append a run to the BIDS map, without targets :param bidsmap: Full bidsmap data structure, with all options, BIDS labels and attributes, etc. 
:param run: The run (listitem) that is appended to the datatype @@ -1538,7 +1513,10 @@ def append_run(bidsmap: dict, run: dict, clean: bool=True) -> None: run_[item].update(run[item]) run = run_ + else: + run = copy.copy(run) # popping targets will not change original run + run.pop("targets", None) if not bidsmap.get(dataformat): bidsmap[dataformat] = {datatype: []} if not bidsmap.get(dataformat).get(datatype): @@ -1970,18 +1948,18 @@ def increment_runindex(outfolder: Path, bidsname: str, run: dict) -> Union[Path, return f"{bidsname}.{suffixes}" if suffixes else bidsname -def rename_runless_to_run1(bids_mappings: List[BidsMapping], scans_table: pd.DataFrame) -> None: +def rename_runless_to_run1(matched_runs: List[dict], scans_table: pd.DataFrame) -> None: """ - Adds run-1 label to run-less files that use dynamic index (<<>>) in matched bidsmap entry and for which other runs - exist in the output folder. Additionally, 'scans_table' is updated based on the changes. - :param bids_mappings: Bids mappings of source to BIDS targets + Adds run-1 label to run-less files that use dynamic index (<<>>) in bidsmap run-items for which files with + run-2 label exist in the output folder. Additionally, 'scans_table' is updated based on the changes. + :param matched_runs: Bidsmap run-items with accumulated files under 'targets' (all files created via that run-item) :param scans_table: BIDS scans.tsv dataframe with all filenames and acquisition timestamps """ - for bids_mapping in bids_mappings: - if bids_mapping.run.get('bids', {}).get('run') != '<<>>': + for matched_run in matched_runs: + if matched_run.get('bids', {}).get('run') != '<<>>': continue - for bids_target in bids_mapping.targets.copy(): # copy: avoid problems with removing items within loop + for bids_target in matched_run["targets"].copy(): # copy: avoid problems with removing items within loop bidsname = bids_target.name suffixes = '' if '.'
in bidsname: @@ -2008,8 +1986,8 @@ def rename_runless_to_run1(bids_mappings: List[BidsMapping], scans_table: pd.Dat inplace=True ) # NB: '/' as_posix # change bids_target from run-less to run-1 - bids_mapping.targets.remove(bids_target) - bids_mapping.targets.add((outfolder / run1_bidsname).with_suffix(suffixes)) + matched_run["targets"].remove(bids_target) + matched_run["targets"].add((outfolder / run1_bidsname).with_suffix(suffixes)) def updatemetadata(sourcemeta: Path, targetmeta: Path, usermeta: dict, extensions: list, datasource: DataSource) -> dict: @@ -2168,67 +2146,6 @@ def addparticipant(participants_tsv: Path, subid: str='', sesid: str='', data: d return table, meta -def add_bids_mappings(bids_mappings: List[BidsMapping], session: Path, bidsfolder: Path, bidsses: Path) -> None: - """ - Create and/or add (if it's not there yet) bids mappings of session to the code/bidscoin/bids_mappings.tsv file - :param bids_mappings: Bids mappings of source to BIDS targets to be added to bids_mappings.tsv - :param session: The full-path name of the subject/session source folder - :param bidsfolder: The name of the BIDS root folder - :param bidsses: The full-path name of the BIDS output `sub-/ses-` folder - :return: None - """ - # Write mappings - out = bidsfolder / "code" / "bidscoin" / "bids_mappings.tsv" - if out.is_file(): - df_existing = pd.read_csv(out, sep='\t') - if 'session' not in df_existing.columns: - df_existing.insert(1, 'session', None) - else: - df_existing = pd.DataFrame(columns=['subject', 'session', 'SeriesDescription', 'source', 'BIDS_mapping']) - - # Convert bids_mappings to DataFrame - entries = [] - for bids_mapping in bids_mappings: - for target in sorted(bids_mapping.targets): - if bidsses.name.startswith('ses-'): - target_subject = bidsses.parent.name - target_session = bidsses.name - else: - target_subject = bidsses.name - target_session = None - if target.relative_to(bidsfolder).parts[0] == "derivatives": - target_outfolder = bidsfolder - else: 
- target_outfolder = bidsses - new_entry = { - "subject": target_subject, - "session": target_session, - 'SeriesDescription': bids_mapping.run.get("attributes", {}).get("SeriesDescription"), - 'source': bids_mapping.source.relative_to(session.parent), - 'BIDS_mapping': target.relative_to(target_outfolder), - } - entries.append(new_entry) - df_mappings = pd.DataFrame(entries) - df_combined = pd.concat([df_existing, df_mappings], ignore_index=True) - - # save bids mappings - out.parent.mkdir(parents=True, exist_ok=True) - LOGGER.verbose(f"Writing bids mappings data to: {out}") - df_combined.to_csv(out, sep='\t', index=False) - - -def drop_session_from_bids_mappings(bids_mappings_file: Path) -> None: - """ - Drops session column from bids_mappings.tsv if no session. - :param bids_mappings_file: Path to bids_mappings.tsv - """ - if bids_mappings_file.exists(): - df_mappings = pd.read_csv(bids_mappings_file, sep='\t') - if df_mappings["session"].isna().all(): - df_mappings.drop(columns="session", inplace=True) - df_mappings.to_csv(bids_mappings_file, sep='\t', index=False) - - def get_propertieshelp(propertieskey: str) -> str: """ Reads the description of a matching attributes key in the source dictionary diff --git a/bidscoin/bidscoiner.py b/bidscoin/bidscoiner.py index 9c9b704c..50caeb28 100755 --- a/bidscoin/bidscoiner.py +++ b/bidscoin/bidscoiner.py @@ -56,12 +56,6 @@ def bidscoiner(rawfolder: str, bidsfolder: str, subjects: list=(), force: bool=F # Create a code/bidscoin subfolder (bidsfolder/'code'/'bidscoin').mkdir(parents=True, exist_ok=True) - # Delete bids_mappings file if it exists - bids_mappings_file = bidsfolder / 'code' / 'bidscoin' / 'bids_mappings.tsv' - if bids_mappings_file.exists(): - LOGGER.info('Deleting old code/bidscoin/bids_mappings.tsv') - bids_mappings_file.unlink() - # Create a dataset description file if it does not exist dataset_file = bidsfolder/'dataset_description.json' generatedby = [{"Name":"BIDScoin", 'Version':__version__, 
'Description:':'A flexible GUI application suite that converts source datasets to BIDS', 'CodeURL':'https://github.com/Donders-Institute/bidscoin'}] @@ -179,9 +173,6 @@ def bidscoiner(rawfolder: str, bidsfolder: str, subjects: list=(), force: bool=F LOGGER.info('') if not jobids: - # delete session column from bids_mappings if no sessions - bids.drop_session_from_bids_mappings(bids_mappings_file) - LOGGER.info('============== HPC FINISH =============') LOGGER.info('') return @@ -239,9 +230,6 @@ def bidscoiner(rawfolder: str, bidsfolder: str, subjects: list=(), force: bool=F if not DEBUG: shutil.rmtree(bidsfolder/'HPC_work', ignore_errors=True) - # delete session column from bids_mappings if no sessions - bids.drop_session_from_bids_mappings(bids_mappings_file) - LOGGER.info('') LOGGER.info('============== HPC FINISH =============') LOGGER.info('') @@ -309,8 +297,6 @@ def bidscoiner(rawfolder: str, bidsfolder: str, subjects: list=(), force: bool=F if unpacked: shutil.rmtree(sesfolder) - # delete session column from bids_mappings if no sessions - bids.drop_session_from_bids_mappings(bids_mappings_file) LOGGER.info('-------------- FINISHED! 
------------') LOGGER.info('') diff --git a/bidscoin/plugins/dcm2niix2bids.py b/bidscoin/plugins/dcm2niix2bids.py index 846f3330..aa400727 100644 --- a/bidscoin/plugins/dcm2niix2bids.py +++ b/bidscoin/plugins/dcm2niix2bids.py @@ -13,7 +13,6 @@ from typing import Union, List from pathlib import Path from bidscoin import bcoin, bids, lsdirs, due, Doi -from bidscoin.bids import BidsMapping from bidscoin.utilities import physio try: from nibabel.testing import data_path @@ -229,7 +228,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None scans_table.index.name = 'filename' # Process all the source files or run subfolders - bids_mappings: List[BidsMapping] = [] + matched_runs: List[dict] = [] sourcefile = Path() for source in sources: @@ -248,18 +247,15 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None # Check if we should ignore this run if datasource.datatype in bidsmap['Options']['bidscoin']['ignoretypes']: LOGGER.info(f"--> Leaving out: {source}") - bids_mappings.append(BidsMapping(source, {Path(bidsses / 'X')}, datasource.datatype, run)) continue # Check if we already know this run if not match: LOGGER.error(f"--> Skipping unknown '{datasource.datatype}' run: {sourcefile}\n-> Re-run the bidsmapper and delete {bidsses} to solve this warning") - bids_mappings.append(BidsMapping(source, {Path(bidsses / 'skipped')}, datasource.datatype, run)) continue LOGGER.info(f"--> Coining: {source}") - bids_mapping = BidsMapping(source, set(), datasource.datatype, run) - bids_mappings.append(bids_mapping) + matched_runs.append(run) # Create the BIDS session/datatype output folder suffix = datasource.dynamicvalue(run['bids']['suffix'], True, True) @@ -302,7 +298,6 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None physiodata = physio.readphysio(sourcefile) physio.physio2tsv(physiodata, outfolder/bidsname) jsonfiles.update(outfolder.glob(f"{bidsname}.json")) # add existing created json 
files: bidsname.json - bids_mapping.targets.add((outfolder / bidsname).with_suffix('.tsv.gz')) except Exception as physioerror: LOGGER.error(f"Could not read/convert physiological file: {sourcefile}\n{physioerror}") continue @@ -319,7 +314,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None if not list(outfolder.glob(f"{bidsname}.*nii*")): continue jsonfiles.update(outfolder.glob(f"{bidsname}.json")) # add existing created json files: bidsname.json - bids_mapping.targets.update(outfolder.glob(f"{bidsname}.*[!json]")) + run["targets"].update(outfolder.glob(f"{bidsname}.*[!json]")) # add files created using this bidsmap run-item (except sidecars) # Handle the ABCD GE pepolar sequence extrafile = list(outfolder.glob(f"{bidsname}a.nii*")) @@ -433,7 +428,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None if newbidsfile.is_file(): LOGGER.warning(f"Overwriting existing {newbidsfile} file -- check your results carefully!") dcm2niixfile.replace(newbidsfile) - bids_mapping.targets.add(newbidsfile) + run["targets"].add(newbidsfile) # Rename all associated files (i.e. 
the json-, bval- and bvec-files) oldjsonfile = dcm2niixfile.with_suffix('').with_suffix('.json') @@ -467,8 +462,8 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None LOGGER.verbose(f"Removing BIDS-invalid b0-file: {bfile} -> {jsonfile}") metadata[ext[1:]] = bdata.values.tolist() bfile.unlink() - if bfile in bids_mapping.targets: - bids_mapping.targets.remove(bfile) + if bfile in run["targets"]: + run["targets"].remove(bfile) # Save the meta-data to the json sidecar-file with jsonfile.open('w') as json_fid: @@ -498,9 +493,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None scans_table.loc[scanpath.as_posix(), 'acq_time'] = acq_time # Handle dynamic index for run-1 - bids.rename_runless_to_run1(bids_mappings, scans_table) - # Write bids mappings - bids.add_bids_mappings(bids_mappings, session, bidsfolder, bidsses) + bids.rename_runless_to_run1(matched_runs, scans_table) # Write the scans_table to disk LOGGER.verbose(f"Writing acquisition time data to: {scans_tsv}") diff --git a/bidscoin/plugins/nibabel2bids.py b/bidscoin/plugins/nibabel2bids.py index dfb893a2..f4b94f5c 100644 --- a/bidscoin/plugins/nibabel2bids.py +++ b/bidscoin/plugins/nibabel2bids.py @@ -12,7 +12,6 @@ from typing import List, Union from pathlib import Path from bidscoin import bids -from bidscoin.bids import BidsMapping try: from nibabel.testing import data_path @@ -166,11 +165,9 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None: # Get the subject identifiers and the BIDS root folder from the bidsses folder if bidsses.name.startswith('ses-'): - bidsfolder = bidsses.parent.parent subid = bidsses.parent.name sesid = bidsses.name else: - bidsfolder = bidsses.parent subid = bidsses.name sesid = '' @@ -192,7 +189,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None: scans_table.index.name = 'filename' # Collect the different Nibabel source files for all files in the session - 
bids_mappings: List[BidsMapping] = [] + matched_runs: List[dict] = [] for sourcefile in sourcefiles: datasource = bids.DataSource(sourcefile, {'nibabel2bids':options}) @@ -201,18 +198,15 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None: # Check if we should ignore this run if datasource.datatype in bidsmap['Options']['bidscoin']['ignoretypes']: LOGGER.info(f"--> Leaving out: {sourcefile}") - bids_mappings.append(BidsMapping(sourcefile, {Path(bidsses / 'X')}, datasource.datatype, run)) continue # Check if we already know this run if not match: LOGGER.error(f"Skipping unknown '{datasource.datatype}' run: {sourcefile}\n-> Re-run the bidsmapper and delete {bidsses} to solve this warning") - bids_mappings.append(BidsMapping(sourcefile, {Path(bidsses / 'skipped')}, datasource.datatype, run)) continue LOGGER.info(f"--> Coining: {sourcefile}") - bids_mapping = BidsMapping(sourcefile, set(), datasource.datatype, run) - bids_mappings.append(bids_mapping) + matched_runs.append(run) # Create the BIDS session/datatype output folder outfolder = bidsses/datasource.datatype @@ -238,7 +232,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None: # Save the sourcefile as a BIDS NIfTI file nib.save(nib.load(sourcefile), bidsfile) - bids_mapping.targets.add(bidsfile) + run["targets"].add(bidsfile) # Load / copy over the source meta-data sidecar = bidsfile.with_suffix('').with_suffix('.json') @@ -251,9 +245,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> None: scans_table.loc[bidsfile.relative_to(bidsses).as_posix(), 'acq_time'] = acq_time.isoformat() # Handle dynamic index for run-1 - bids.rename_runless_to_run1(bids_mappings, scans_table) - # Write bids mappings - bids.add_bids_mappings(bids_mappings, session, bidsfolder, bidsses) + bids.rename_runless_to_run1(matched_runs, scans_table) # Write the scans_table to disk LOGGER.verbose(f"Writing data to: {scans_tsv}") diff --git 
a/bidscoin/plugins/spec2nii2bids.py b/bidscoin/plugins/spec2nii2bids.py index 3f50e17c..66b77164 100644 --- a/bidscoin/plugins/spec2nii2bids.py +++ b/bidscoin/plugins/spec2nii2bids.py @@ -12,7 +12,6 @@ from bids_validator import BIDSValidator from pathlib import Path from bidscoin import bcoin, bids, due, Doi -from bidscoin.bids import BidsMapping LOGGER = logging.getLogger(__name__) @@ -174,11 +173,9 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None # Get the subject identifiers and the BIDS root folder from the bidsses folder if bidsses.name.startswith('ses-'): - bidsfolder = bidsses.parent.parent subid = bidsses.parent.name sesid = bidsses.name else: - bidsfolder = bidsses.parent subid = bidsses.name sesid = '' @@ -200,7 +197,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None scans_table.index.name = 'filename' # Loop over all MRS source data files and convert them to BIDS - bids_mappings: List[BidsMapping] = [] + matched_runs: List[dict] = [] for sourcefile in sourcefiles: # Get a data source, a matching run from the bidsmap @@ -210,18 +207,15 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None # Check if we should ignore this run if datasource.datatype in bidsmap['Options']['bidscoin']['ignoretypes']: LOGGER.info(f"--> Leaving out: {sourcefile}") - bids_mappings.append(BidsMapping(sourcefile, {Path(bidsses / 'X')}, datasource.datatype, run)) continue # Check that we know this run if index is None: LOGGER.error(f"Skipping unknown '{datasource.datatype}' run: {sourcefile}\n-> Re-run the bidsmapper and delete the MRS output data in {bidsses} to solve this warning") - bids_mappings.append(BidsMapping(sourcefile, {Path(bidsses / 'skipped')}, datasource.datatype, run)) continue LOGGER.info(f"--> Coining: {sourcefile}") - bids_mapping = BidsMapping(sourcefile, set(), datasource.datatype, run) - bids_mappings.append(bids_mapping) + matched_runs.append(run) # Create the 
BIDS session/datatype output folder outfolder = bidsses/datasource.datatype @@ -264,7 +258,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None if bcoin.run_command(f'{command} {dformat} -j -f "{bidsname}" -o "{outfolder}" {args} {arg} "{sourcefile}"'): if not list(outfolder.glob(f"{bidsname}.nii*")): continue - bids_mapping.targets.update(outfolder.glob(f"{bidsname}.*[!json]")) + run["targets"].update(outfolder.glob(f"{bidsname}.*[!json]")) # add files created using this bidsmap run-item (except sidecars) # Load / copy over and adapt the newly produced json sidecar-file (NB: assumes every NIfTI-file comes with a json-file) metadata = bids.updatemetadata(sourcefile, sidecar, run['meta'], options['meta'], datasource) @@ -294,9 +288,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None scans_table.loc[sidecar.with_suffix('.nii.gz').relative_to(bidsses).as_posix(), 'acq_time'] = acq_time # Handle dynamic index for run-1 - bids.rename_runless_to_run1(bids_mappings, scans_table) - # Write bids mappings - bids.add_bids_mappings(bids_mappings, session, bidsfolder, bidsses) + bids.rename_runless_to_run1(matched_runs, scans_table) # Write the scans_table to disk LOGGER.verbose(f"Writing acquisition time data to: {scans_tsv}") diff --git a/tests/test_bids.py b/tests/test_bids.py index fda9614a..1d012763 100644 --- a/tests/test_bids.py +++ b/tests/test_bids.py @@ -9,7 +9,6 @@ from nibabel.testing import data_path from pydicom.data import get_testdata_file from bidscoin import bcoin, bids, bidsmap_template -from bidscoin.bids import BidsMapping bcoin.setup_logging() @@ -462,8 +461,8 @@ def test_rename_runless_to_run1(tmp_path): """Test <<>> index renaming run-less files to run-1 files.""" # Create data - run = {'bids': {'run': '<<>>'}} - bids_mappings = [] + run = {'bids': {'run': '<<>>'}, 'targets': set()} + matched_runs = [] old_runless_bidsname = 'sub-01_T1w' new_run1_bidsname = 'sub-01_run-1_T1w' 
run2_bidsname = 'sub-01_run-2_T1w' @@ -474,7 +473,8 @@ def test_rename_runless_to_run1(tmp_path): outfile = (outfolder/file_name).with_suffix(suffix) outfile.touch() if suffix == '.nii.gz': - bids_mappings.append(BidsMapping(Path("dummy_source"), {outfile}, 'anat', run)) + run["targets"].add(outfile) + matched_runs.append(run) # Create the scans table scans_data = { @@ -489,7 +489,7 @@ def test_rename_runless_to_run1(tmp_path): result_scans_table = pd.DataFrame(result_scans_data).set_index('filename') # Run the function - bids.rename_runless_to_run1(bids_mappings, scans_table) + bids.rename_runless_to_run1(matched_runs, scans_table) # Check the results assert result_scans_table.equals(scans_table) @@ -498,299 +498,6 @@ def test_rename_runless_to_run1(tmp_path): assert (outfolder/new_run1_bidsname).with_suffix(suffix).is_file() is True -def test_add_bids_mappings__new(tmp_path): - """Test creating new 'bids_mappings.tsv'.""" - - sessionfolder = tmp_path/'source'/'sub-01' - bidsfolder = tmp_path/'bids' - bidsses = tmp_path/'bids'/'sub-01' - out = bidsfolder / "code" / "bidscoin" / "bids_mappings.tsv" - - bids_mappings = [ - BidsMapping( - sessionfolder / 'anat_source', - {bidsses/'anat'/'sub-01_T1w.nii.gz'}, - 'anat', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 't1'} } - ), - BidsMapping( - sessionfolder / 'func_source', - { - bidsses / 'func' / 'sub-01_task-dummy_bold.nii.gz', - bidsses / 'func' / 'sub-01_task-dummy_part-phase_bold.nii.gz' - }, - 'func', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 'bold'}} - ) - ] - expected_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "session": ['NaN', 'NaN', 'NaN'], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("sub-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_T1w.nii.gz"), - ("func", "sub-01_task-dummy_bold.nii.gz"), - ("func", 
"sub-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - - # Run the function - bids.add_bids_mappings(bids_mappings, sessionfolder, bidsfolder, bidsses) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - result_df['session'].fillna('NaN', inplace=True) - assert result_df.equals(expected_df) - - -def test_add_bids_mappings__combined(tmp_path): - """Test adding new bids_mappings to 'bids_mappings.tsv'.""" - - sessionfolder = tmp_path/'source'/'sub-02' - bidsfolder = tmp_path/'bids' - bidsses = tmp_path/'bids'/'sub-02' - out = bidsfolder / "code" / "bidscoin" / "bids_mappings.tsv" - - # Create existing bids_mappings for sub-01 - existing_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("sub-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_T1w.nii.gz"), - ("func", "sub-01_task-dummy_bold.nii.gz"), - ("func", "sub-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - out.parent.mkdir(parents=True) - existing_df.to_csv(out, sep='\t', index=False) - - bids_mappings = [ - BidsMapping( - sessionfolder / 'anat_source', - {bidsses/'anat'/'sub-02_T1w.nii.gz'}, - 'anat', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 't1'} } - ), - BidsMapping( - sessionfolder / 'func_source', - { - bidsses / 'func' / 'sub-02_task-dummy_bold.nii.gz', - bidsses / 'func' / 'sub-02_task-dummy_part-phase_bold.nii.gz' - }, - 'func', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 'bold'}} - ) - ] - - expected_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01", "sub-02", "sub-02", "sub-02"], - "session": ['NaN', 'NaN', 'NaN', 'NaN', 'NaN', 'NaN'], - "SeriesDescription": ["t1", "bold", "bold", "t1", "bold", "bold"], - "source": [ - str(Path(outdir) / file) for outdir, file in [ - ("sub-01", 
"anat_source"), - ("sub-01", "func_source"), - ("sub-01", "func_source"), - ("sub-02", "anat_source"), - ("sub-02", "func_source"), - ("sub-02", "func_source"), - ] - ], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_T1w.nii.gz"), - ("func", "sub-01_task-dummy_bold.nii.gz"), - ("func", "sub-01_task-dummy_part-phase_bold.nii.gz"), - ("anat", "sub-02_T1w.nii.gz"), - ("func", "sub-02_task-dummy_bold.nii.gz"), - ("func", "sub-02_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - - # Run the function - bids.add_bids_mappings(bids_mappings, sessionfolder, bidsfolder, bidsses) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - result_df['session'].fillna('NaN', inplace=True) - assert result_df.equals(expected_df) - - -def test_add_bids_mappings__session(tmp_path): - """Test creating new 'bids_mappings.tsv' with session.""" - - sessionfolder = tmp_path / 'source' / 'sub-01' / 'ses-01' - bidsfolder = tmp_path / 'bids' - bidsses = tmp_path / 'bids' / 'sub-01' / 'ses-01' - out = bidsfolder / "code" / "bidscoin" / "bids_mappings.tsv" - - bids_mappings = [ - BidsMapping( - sessionfolder / 'anat_source', - {bidsses / 'anat' / 'sub-01_ses-01_T1w.nii.gz'}, - 'anat', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 't1'}} - ), - BidsMapping( - sessionfolder / 'func_source', - { - bidsses / 'func' / 'sub-01_ses-01_task-dummy_bold.nii.gz', - bidsses / 'func' / 'sub-01_ses-01_task-dummy_part-phase_bold.nii.gz' - }, - 'func', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 'bold'}} - ) - ] - expected_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "session": ['ses-01', 'ses-01', 'ses-01'], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("ses-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_ses-01_T1w.nii.gz"), - 
("func", "sub-01_ses-01_task-dummy_bold.nii.gz"), - ("func", "sub-01_ses-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - - # Run the function - bids.add_bids_mappings(bids_mappings, sessionfolder, bidsfolder, bidsses) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - assert result_df.equals(expected_df) - - -def test_add_bids_mappings__derivatives(tmp_path): - """Test creating 'bids_mappings.tsv' with derivatives.""" - - sessionfolder = tmp_path / 'source' / 'sub-01' - bidsfolder = tmp_path / 'bids' - bidsses = tmp_path / 'bids' / 'sub-01' - out = bidsfolder / "code" / "bidscoin" / "bids_mappings.tsv" - - bids_mappings = [ - BidsMapping( - sessionfolder / 'fmap_source', - {bidsfolder / 'derivatives' / 'SIEMENS' / 'fmap' / 'sub-01_TB1RFM.nii.gz'}, - 'fmap', - {'bids': {'run': '<<>>'}, 'attributes': {'SeriesDescription': 't1'}} - ) - ] - expected_df = pd.DataFrame( - { - "subject": ["sub-01"], - "session": ['NaN'], - "SeriesDescription": ["t1"], - "source": [str(Path("sub-01") / "fmap_source")], - "BIDS_mapping": [str(Path("derivatives") / "SIEMENS" / "fmap" / "sub-01_TB1RFM.nii.gz")] - } - ) - - # Run the function - bids.add_bids_mappings(bids_mappings, sessionfolder, bidsfolder, bidsses) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - result_df['session'].fillna('NaN', inplace=True) - assert result_df.equals(expected_df) - - -def test_drop_session_from_bids_mappings__session_dropped(tmp_path): - - out = tmp_path / 'bids' / "code" / "bidscoin" / "bids_mappings.tsv" - existing_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "session": [None, None, None], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("sub-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_T1w.nii.gz"), - ("func", "sub-01_task-dummy_bold.nii.gz"), 
- ("func", "sub-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - out.parent.mkdir(parents=True) - existing_df.to_csv(out, sep='\t', index=False) - expected_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("sub-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_T1w.nii.gz"), - ("func", "sub-01_task-dummy_bold.nii.gz"), - ("func", "sub-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - - # Run the function - bids.drop_session_from_bids_mappings(out) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - assert result_df.equals(expected_df) - - -def test_drop_session_from_bids_mappings__session_not_dropped(tmp_path): - - out = tmp_path / 'bids' / "code" / "bidscoin" / "bids_mappings.tsv" - expected_df = pd.DataFrame( - { - "subject": ["sub-01", "sub-01", "sub-01"], - "session": ['ses-01', 'ses-01', 'ses-01'], - "SeriesDescription": ["t1", "bold", "bold"], - "source": [str(Path("ses-01") / src) for src in ["anat_source", "func_source", "func_source"]], - "BIDS_mapping": [ - str(Path(outdir) / file) for outdir, file in [ - ("anat", "sub-01_ses-01_T1w.nii.gz"), - ("func", "sub-01_ses-01_task-dummy_bold.nii.gz"), - ("func", "sub-01_ses-01_task-dummy_part-phase_bold.nii.gz") - ] - ] - } - ) - out.parent.mkdir(parents=True) - expected_df.to_csv(out, sep='\t', index=False) - - # Run the function - bids.drop_session_from_bids_mappings(out) - - # Check the results - assert out.is_file() is True - result_df = pd.read_csv(out, sep='\t') - assert result_df.equals(expected_df) - - def test_get_bidsname(raw_dicomdir): dicomfile = raw_dicomdir/'Doe^Archibald'/'01-XR C Spine Comp Min 4 Views'/'001-Cervical LAT'/'6154'