Skip to content

Commit

Permalink
Merge pull request #134 from Slautin/main
Browse files Browse the repository at this point in the history
Corrected version of gwyddion.py (syntax error fixed) and changed the output of IgorIBWReader to a dictionary.
  • Loading branch information
ramav87 authored Jul 19, 2024
2 parents 75d72ed + 7ce41ac commit b0e7526
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 34 deletions.
18 changes: 8 additions & 10 deletions SciFiReaders/readers/microscopy/spm/afm/gwyddion.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,24 +132,22 @@ def gsf_read(self):
# If an error occurs, fall back to using "a.u"
data_set.units = "a.u"
data_set.quantity = metadata['Title']

try:
units = metadata['XYUnits'],
except Exception as e:
# If an error occurs, fall back to using "a.u"
units= "a.u"

#Add dimension info
data_set.set_dimension(0, sid.Dimension(np.linspace(0, metadata['XReal'], num_cols),
name = 'x',
try:
units = metadata['XYUnits'],
except Exception as e:
# If an error occurs, fall back to using "a.u"
units= "a.u",
units= units,
quantity = 'x',
dimension_type='spatial'))
data_set.set_dimension(1, sid.Dimension(np.linspace(0, metadata['YReal'], num_rows),
name = 'y',
try:
units = metadata['XYUnits'],
except Exception as e:
# If an error occurs, fall back to using "a.u"
units= "a.u",
units = units,
quantity = 'y',
dimension_type='spatial'))

Expand Down
13 changes: 10 additions & 3 deletions SciFiReaders/readers/microscopy/spm/afm/igor_ibw.py
Original file line number Diff line number Diff line change
Expand Up @@ -302,12 +302,13 @@ def read(self, verbose=False, parm_encoding='utf-8'):
# Get the data to figure out if this is an image or a force curve
images = ibw_wave.get('wData')

datasets = [] #list of sidpy datasets
datasets = {}#[] #list of sidpy datasets

if images.shape[-1] != len(chan_labels):
chan_labels = chan_labels[1:] # for layer 0 null set errors in older AR software
chan_units = chan_units[1:]

channel_number = 0
if images.ndim == 3: # Image stack
if verbose:
print('Found image stack of size {}'.format(images.shape))
Expand Down Expand Up @@ -339,7 +340,10 @@ def read(self, verbose=False, parm_encoding='utf-8'):
data_set.data_type = 'image'

#Finally, append it
datasets.append(data_set)
#datasets.append(data_set)
key_channel = f"Channel_{int(channel_number):03d}"
datasets[key_channel] = data_set
channel_number += 1

else: # single force curve
if verbose:
Expand Down Expand Up @@ -384,7 +388,10 @@ def read(self, verbose=False, parm_encoding='utf-8'):
data_set.original_metadata = parm_dict

#Add dataset to list
datasets.append(data_set)
#datasets.append(data_set)
key_channel = f"Channel_{int(channel_number):03d}"
datasets[key_channel] = data_set
channel_number += 1

# Return the dataset
return datasets
Expand Down
44 changes: 23 additions & 21 deletions tests/readers/microscopy/spm/afm/test_igor.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def test_igor_matrix_file_image(self):
assert dataset.data_descriptor==true_dd, "Data descriptor was expected to be {} \
but received {}".format(true_dd, dataset.data_descriptor)
dimension_sizes = [58,256]
dimension_types = [sidpy.DimensionType.SPATIAL,sidpy.DimensionType.SPATIAL]
dimension_types = [sidpy.DimensionType.SPATIAL, sidpy.DimensionType.SPATIAL]
for dim in dataset._axes:
dimension = dataset._axes[dim]
assert dimension.size == dimension_sizes[dim]
Expand Down Expand Up @@ -117,7 +117,7 @@ def test_load_test_ibw_force_file(self):
data_translator = sr.IgorIBWReader(file_path)
datasets = data_translator.read(verbose=False)
assert len(datasets)==3, "Length of dataset should be 3 but is instead {}".format(len(datasets))
metadata = datasets[0].original_metadata
metadata = datasets['Channel_000'].original_metadata
data_descriptors = ['Raw (m)', 'Defl (m)', 'ZSnsr (m)']

original_metadata ={'MostPosZVoltage': 150,
Expand Down Expand Up @@ -756,15 +756,16 @@ def test_load_test_ibw_force_file(self):
assert original_metadata[key] == metadata[key], "Metadata incorrect for key {}, should be {} " \
"but was read as {}".format(key, original_metadata[key], metadata[key])

for ind in range(len(datasets)):
assert type(datasets[ind])== sidpy.sid.dataset.Dataset, "Dataset No. {} not read in as sidpy dataset" \
"but was instead read in as {}".format(ind, type(datasets[ind]))
#for ind in range(len(datasets)):
for ind, key in enumerate(datasets):
assert type(datasets[key])== sidpy.sid.dataset.Dataset, "Dataset No. {} not read in as sidpy dataset" \
"but was instead read in as {}".format(ind, type(datasets[key]))

assert datasets[ind].shape[0]==1261, "Dataset[{}] is of size 1261 but was read in as {}".format(ind, datasets[ind].shape[0])
assert type(datasets[ind]._axes[0]) == sidpy.sid.dimension.Dimension, "Dataset should have dimension type " \
"of sidpy Dimension, but is instead {}".format(type(datasets[ind]._axes))
assert datasets[ind].data_descriptor == data_descriptors[ind], "Dataset {} " \
"should have descriptor {} but instead has descriptor {}".format(ind, data_descriptors[ind], datasets[ind].data_descriptor)
assert datasets[key].shape[0]==1261, "Dataset[{}] is of size 1261 but was read in as {}".format(ind, datasets[key].shape[0])
assert type(datasets[key]._axes[0]) == sidpy.sid.dimension.Dimension, "Dataset should have dimension type " \
"of sidpy Dimension, but is instead {}".format(type(datasets[key]._axes))
assert datasets[key].data_descriptor == data_descriptors[ind], "Dataset {} " \
"should have descriptor {} but instead has descriptor {}".format(ind, data_descriptors[ind], datasets[key].data_descriptor)

os.remove(file_path)

Expand Down Expand Up @@ -1412,7 +1413,7 @@ def test_load_test_ibw_image_file(self):
'modDate': 3692884718,
'bname': b'BTFO_DSO_Thick0000'}

metadata = datasets[0].original_metadata
metadata = datasets['Channel_000'].original_metadata

data_labels = [['x (m)', 'y (m)'],
['x (m)', 'y (m)'],
Expand All @@ -1427,18 +1428,19 @@ def test_load_test_ibw_image_file(self):
"but was read as {}".format(key, original_metadata[key], metadata[key])


for ind in range(len(datasets)):
assert type(datasets[ind])== sidpy.sid.dataset.Dataset, "Dataset No. {} not read in as sidpy dataset" \
"but was instead read in as {}".format(ind, type(datasets[ind]))
#for ind in range(len(datasets)):
for ind,key in enumerate(datasets):
assert type(datasets[key])== sidpy.sid.dataset.Dataset, "Dataset No. {} not read in as sidpy dataset" \
"but was instead read in as {}".format(ind, type(datasets[key]))

assert datasets[ind].labels == data_labels[ind], "Dataset {} label should be a {} but " \
"is instead {}".format(ind,data_labels[ind], datasets[ind].labels)
assert datasets[key].labels == data_labels[ind], "Dataset {} label should be a {} but " \
"is instead {}".format(ind,data_labels[ind], datasets[key].labels)

assert datasets[ind].shape==(256, 256), "Dataset[{}] is of size (256,256) but was read in as {}".format(ind, datasets[ind].shape)
assert type(datasets[ind]._axes[0]) == sidpy.sid.dimension.Dimension, "Dataset should have dimension type " \
"of sidpy Dimension, but is instead {}".format(type(datasets[ind]._axes))
assert datasets[key].shape==(256, 256), "Dataset[{}] is of size (256,256) but was read in as {}".format(ind, datasets[key].shape)
assert type(datasets[key]._axes[0]) == sidpy.sid.dimension.Dimension, "Dataset should have dimension type " \
"of sidpy Dimension, but is instead {}".format(type(datasets[key]._axes))

assert datasets[ind].data_descriptor == data_descriptors[ind], "Dataset {} " \
"should have descriptor {} but instead has descriptor {}".format(ind, data_descriptors[ind], datasets[ind].data_descriptor)
assert datasets[key].data_descriptor == data_descriptors[ind], "Dataset {} " \
"should have descriptor {} but instead has descriptor {}".format(ind, data_descriptors[ind], datasets[key].data_descriptor)

os.remove(file_path)

0 comments on commit b0e7526

Please sign in to comment.