Merge pull request #215 from DominikaZ/fix-nonexistent-jsonfiles
Add only existing json-files to jsonfiles and make it a set
marcelzwiers authored Jan 11, 2024
2 parents b8a0a5d + 5c3b8de commit 55b1302
Showing 1 changed file with 8 additions and 6 deletions.
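
In short: instead of starting from the single json sidecar-file that dcm2niix is expected to write (which may not exist, e.g. with multi-echo data where the files get renamed with a postfix), the sidecars are now collected into a set from the json-files that are actually found on disk. A minimal sketch of the difference, with a hypothetical output folder and BIDS name (not taken from the plugin):

```python
from pathlib import Path

# Hypothetical folder and name, for illustration only
outfolder = Path('/data/bids/sub-01/ses-01/anat')
bidsname  = 'sub-01_ses-01_acq-mprage_T1w'

# Before: the expected sidecar was assumed to exist, even if dcm2niix never wrote it
jsonfiles_old = [(outfolder/bidsname).with_suffix('.json')]

# After: only sidecars that actually exist are collected, and the set de-duplicates them
jsonfiles_new = set()
jsonfiles_new.update(outfolder.glob(f"{bidsname}.json"))  # an empty glob adds nothing
jsonfiles_new.update(outfolder.glob(f"{bidsname}.json"))  # adding the same file twice has no effect
```

Because a set cannot contain duplicates, the final sidecar loop no longer needs the sorted(set(jsonfiles)) wrapper and simply iterates over sorted(jsonfiles).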
14 changes: 8 additions & 6 deletions bidscoin/plugins/dcm2niix2bids.py
@@ -266,7 +266,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
         bidsname = bids.get_bidsname(subid, sesid, run, not bidsignore, runtime=True)
         bidsignore = bidsignore or bids.check_ignore(bidsname+'.json', bidsmap['Options']['bidscoin']['bidsignore'], 'file')
         bidsname = bids.increment_runindex(outfolder, bidsname, run, scans_table)
-        jsonfiles = [(outfolder/bidsname).with_suffix('.json')]  # List -> Collect the associated json-files (for updating them later) -- possibly > 1
+        jsonfiles = set()  # Set -> Collect the associated json-files (for updating them later) -- possibly > 1

         # Check if the bidsname is valid
         bidstest = (Path('/')/subid/sesid/datasource.datatype/bidsname).with_suffix('.json').as_posix()
@@ -293,6 +293,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
             try:
                 physiodata = physio.readphysio(sourcefile)
                 physio.physio2tsv(physiodata, outfolder/bidsname)
+                jsonfiles.update(outfolder.glob(f"{bidsname}.json"))  # add existing created json files: bidsname.json
             except Exception as physioerror:
                 LOGGER.error(f"Could not read/convert physiological file: {sourcefile}\n{physioerror}")
                 continue
@@ -308,6 +308,8 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
             if bcoin.run_command(command):
                 if not list(outfolder.glob(f"{bidsname}.*nii*")): continue

+            jsonfiles.update(outfolder.glob(f"{bidsname}.json"))  # add existing created json files: bidsname.json
+
             # Handle the ABCD GE pepolar sequence
             extrafile = list(outfolder.glob(f"{bidsname}a.nii*"))
             if extrafile:
@@ -320,7 +323,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
                     LOGGER.verbose(f"Renaming GE reversed polarity image: {extrafile[0]} -> {invfile}")
                     extrafile[0].replace(invfile)
                     extrafile[0].with_suffix('').with_suffix('.json').replace(invfile.with_suffix('').with_suffix('.json'))
-                    jsonfiles.append(invfile.with_suffix('').with_suffix('.json'))
+                    jsonfiles.add(invfile.with_suffix('').with_suffix('.json'))
                 else:
                     LOGGER.warning(f"Unexpected variants of {outfolder/bidsname}* were produced by dcm2niix. Possibly this can be remedied by using the dcm2niix -i option (to ignore derived, localizer and 2D images) or by clearing the BIDS folder before running bidscoiner")

@@ -335,8 +338,7 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
             # Rename all files that got additional postfixes from dcm2niix. See: https://github.com/rordenlab/dcm2niix/blob/master/FILENAMING.md
             dcm2niixpostfixes = ('_c', '_i', '_Eq', '_real', '_imaginary', '_MoCo', '_t', '_Tilt', '_e', '_ph', '_ADC', '_fieldmaphz')  #_c%d, _e%d and _ph (and any combination of these in that order) are for multi-coil data, multi-echo data and phase data
             dcm2niixfiles = sorted(set([dcm2niixfile for dcm2niixpostfix in dcm2niixpostfixes for dcm2niixfile in outfolder.glob(f"{bidsname}*{dcm2niixpostfix}*.nii*")]))
-            if not jsonfiles[0].is_file() and dcm2niixfiles:  # Possibly renamed by dcm2niix, e.g. with multi-echo data (but not always for the first echo)
-                jsonfiles.pop(0)
+
             for dcm2niixfile in dcm2niixfiles:

                 # Strip each dcm2niix postfix and assign it to bids entities in a newly constructed bidsname
@@ -431,12 +433,12 @@ def bidscoiner_plugin(session: Path, bidsmap: dict, bidsses: Path) -> Union[None
                 if oldjsonfile in jsonfiles:
                     jsonfiles.remove(oldjsonfile)
                 if newjsonfile not in jsonfiles:
-                    jsonfiles.append(newjsonfile)
+                    jsonfiles.add(newjsonfile)
                 for oldfile in outfolder.glob(dcm2niixfile.with_suffix('').stem + '.*'):
                     oldfile.replace(newjsonfile.with_suffix(''.join(oldfile.suffixes)))

         # Loop over all the newly produced json sidecar-files and adapt the data (NB: assumes every NIfTI-file comes with a json-file)
-        for jsonfile in sorted(set(jsonfiles)):
+        for jsonfile in sorted(jsonfiles):

             # Load / copy over the source meta-data
             metadata = bids.updatemetadata(sourcefile, jsonfile, run['meta'], options['meta'], datasource)
