Skip to content

Commit

Permalink
Bugfix proper reading and handling of scan positions into calibrated …
Browse files Browse the repository at this point in the history
…scan positions
  • Loading branch information
markus.kuehbach committed Oct 25, 2023
1 parent fea7842 commit 0a2307f
Show file tree
Hide file tree
Showing 7 changed files with 120 additions and 61 deletions.
4 changes: 2 additions & 2 deletions pynxtools/dataconverter/readers/em/subparsers/hfive_apex.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,9 +233,9 @@ def parse_and_normalize_group_ebsd_data(self, fp, ckey: str):
self.tmp[ckey]["scan_point_x"] = np.asarray(
np.linspace(0, self.tmp[ckey]["n_x"] - 1,
num=self.tmp[ckey]["n_x"],
endpoint=True) * self.tmp[ckey]["s_x"] + 0., np.float32)
endpoint=True) * self.tmp[ckey]["s_x"], np.float32)

self.tmp[ckey]["scan_point_y"] = np.asarray(
np.linspace(0, self.tmp[ckey]["n_y"] - 1,
num=self.tmp[ckey]["n_y"],
endpoint=True) * self.tmp[ckey]["s_y"] + 0., np.float32)
endpoint=True) * self.tmp[ckey]["s_y"], np.float32)
37 changes: 20 additions & 17 deletions pynxtools/dataconverter/readers/em/subparsers/hfive_bruker.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,16 +107,16 @@ def parse_and_normalize_group_ebsd_header(self, fp, ckey: str):
if f"{grp_name}" not in fp:
raise ValueError(f"Unable to parse {grp_name} !")

req_fields = ["NCOLS", "NROWS", "SEPixelSizeX", "SEPixelSizeY"]
req_fields = ["NCOLS", "NROWS", "XSTEP", "YSTEP"]
for req_field in req_fields:
if f"{grp_name}/{req_field}" not in fp:
raise ValueError(f"Unable to parse {grp_name}/{req_field} !")

self.tmp[ckey]["n_x"] = fp[f"{grp_name}/NCOLS"][()]
self.tmp[ckey]["n_y"] = fp[f"{grp_name}/NROWS"][()]
self.tmp[ckey]["s_x"] = fp[f"{grp_name}/SEPixelSizeX"][()]
self.tmp[ckey]["s_x"] = fp[f"{grp_name}/XSTEP"][()]
self.tmp[ckey]["s_unit"] = "um" # "µm" # TODO::always micron?
self.tmp[ckey]["s_y"] = fp[f"{grp_name}/SEPixelSizeY"][()]
self.tmp[ckey]["s_y"] = fp[f"{grp_name}/YSTEP"][()]
# TODO::check that all data are consistent
# TODO::what is y and x depends on coordinate system

Expand Down Expand Up @@ -189,7 +189,7 @@ def parse_and_normalize_group_ebsd_data(self, fp, ckey: str):
if f"{grp_name}" not in fp:
raise ValueError(f"Unable to parse {grp_name} !")

req_fields = ["phi1", "PHI", "phi2", "Phase", "X SAMPLE", "Y SAMPLE", "MAD"]
req_fields = ["phi1", "PHI", "phi2", "Phase", "MAD"]
for req_field in req_fields:
if f"{grp_name}/{req_field}" not in fp:
raise ValueError(f"Unable to parse {grp_name}/{req_field} !")
Expand Down Expand Up @@ -219,19 +219,22 @@ def parse_and_normalize_group_ebsd_data(self, fp, ckey: str):
else:
raise ValueError(f"{grp_name}/Phase has unexpected shape !")

# X
if np.shape(fp[f"{grp_name}/X SAMPLE"][:])[0] == n_pts:
self.tmp[ckey]["scan_point_x"] \
= np.asarray(fp[f"{grp_name}/X SAMPLE"][:], np.float32)
else:
raise ValueError(f"{grp_name}/X SAMPLE has unexpected shape !")

# Y
if np.shape(fp[f"{grp_name}/Y SAMPLE"][:])[0] == n_pts:
self.tmp[ckey]["scan_point_y"] \
= np.asarray(fp[f"{grp_name}/Y SAMPLE"], np.float32)
else:
raise ValueError(f"{grp_name}/Y SAMPLE has unexpected shape !")
# X and Y
# there are X SAMPLE and Y SAMPLE datasets but their definition is unclear; instead
# here adding x and y assuming that we scan first lines along positive x and then
# moving downwards along +y
self.tmp[ckey]["scan_point_x"] \
= np.asarray(np.tile(np.linspace(0.,
self.tmp[ckey]["n_x"] - 1.,
num=self.tmp[ckey]["n_x"],
endpoint=True) * self.tmp[ckey]["s_x"],
self.tmp[ckey]["n_y"]), np.float32)
self.tmp[ckey]["scan_point_y"] \
= np.asarray(np.repeat(np.linspace(0.,
self.tmp[ckey]["n_y"] - 1.,
num=self.tmp[ckey]["n_y"],
endpoint=True) * self.tmp[ckey]["s_y"],
self.tmp[ckey]["n_x"]), np.float32)

# Band Contrast is not stored in Bruker but Radon Quality or MAD
# but this is s.th. different as it is the mean angular deviation between
Expand Down
44 changes: 29 additions & 15 deletions pynxtools/dataconverter/readers/em/subparsers/hfive_ebsd.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,16 +108,16 @@ def parse_and_normalize_group_ebsd_header(self, fp, ckey: str):
if f"{grp_name}" not in fp:
raise ValueError(f"Unable to parse {grp_name} !")

req_fields = ["NCOLS", "NROWS", "SEPixelSizeX", "SEPixelSizeY"]
req_fields = ["NCOLS", "NROWS", "XSTEP", "YSTEP"]
for req_field in req_fields:
if f"{grp_name}/{req_field}" not in fp:
raise ValueError(f"Unable to parse {grp_name}/{req_field} !")

self.tmp[ckey]["n_x"] = fp[f"{grp_name}/NCOLS"][()]
self.tmp[ckey]["n_y"] = fp[f"{grp_name}/NROWS"][()]
self.tmp[ckey]["s_x"] = fp[f"{grp_name}/SEPixelSizeX"][()]
self.tmp[ckey]["s_x"] = fp[f"{grp_name}/XSTEP"][()]
self.tmp[ckey]["s_unit"] = "um" # "µm" # TODO::always micron?
self.tmp[ckey]["s_y"] = fp[f"{grp_name}/SEPixelSizeY"][()]
self.tmp[ckey]["s_y"] = fp[f"{grp_name}/YSTEP"][()]
# TODO::check that all data are consistent
# TODO::what is y and x depends on coordinate system
# TODO::why is SEPixelSize* half the value of *STEP for * X and Y respectively?
Expand Down Expand Up @@ -222,19 +222,33 @@ def parse_and_normalize_group_ebsd_data(self, fp, ckey: str):
else:
raise ValueError(f"{grp_name}/Phase has unexpected shape !")

# X
if np.shape(fp[f"{grp_name}/X SAMPLE"][:])[0] == n_pts:
self.tmp[ckey]["scan_point_x"] \
= np.asarray(fp[f"{grp_name}/X SAMPLE"][:], np.float32)
else:
raise ValueError(f"{grp_name}/X SAMPLE has unexpected shape !")
# X and Y
# there exist X SAMPLE and Y SAMPLE which give indeed calibrated coordinates
# relative to the sample coordinate system; ignore this for now
# and TODO::just calibrate based on the image dimensions
self.tmp[ckey]["scan_point_x"] \
= np.asarray(np.tile(np.linspace(0.,
self.tmp[ckey]["n_x"] - 1.,
num=self.tmp[ckey]["n_x"],
endpoint=True) * self.tmp[ckey]["s_x"],
self.tmp[ckey]["n_y"]), np.float32)
self.tmp[ckey]["scan_point_y"] \
= np.asarray(np.repeat(np.linspace(0.,
self.tmp[ckey]["n_y"] - 1.,
num=self.tmp[ckey]["n_y"],
endpoint=True) * self.tmp[ckey]["s_y"],
self.tmp[ckey]["n_x"]), np.float32)

# Y
if np.shape(fp[f"{grp_name}/Y SAMPLE"][:])[0] == n_pts:
self.tmp[ckey]["scan_point_y"] \
= np.asarray(fp[f"{grp_name}/Y SAMPLE"], np.float32)
else:
raise ValueError(f"{grp_name}/Y SAMPLE has unexpected shape !")
# if np.shape(fp[f"{grp_name}/X SAMPLE"][:])[0] == n_pts:
# self.tmp[ckey]["scan_point_x"] \
# = np.asarray(fp[f"{grp_name}/X SAMPLE"][:], np.float32)
# else:
# raise ValueError(f"{grp_name}/X SAMPLE has unexpected shape !")
# if np.shape(fp[f"{grp_name}/Y SAMPLE"][:])[0] == n_pts:
# self.tmp[ckey]["scan_point_y"] \
# = np.asarray(fp[f"{grp_name}/Y SAMPLE"], np.float32)
# else:
# raise ValueError(f"{grp_name}/Y SAMPLE has unexpected shape !")

# Band Contrast is not stored in Bruker but Radon Quality or MAD
# but this is s.th. different as it is the mean angular deviation between
Expand Down
27 changes: 23 additions & 4 deletions pynxtools/dataconverter/readers/em/subparsers/hfive_edax.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,26 @@ def parse_and_normalize_group_ebsd_data(self, fp, ckey: str):
self.tmp[ckey]["phase_id"] = np.asarray(fp[f"{grp_name}/Phase"][:], np.int32)
# promoting int8 to int32 no problem
self.tmp[ckey]["ci"] = np.asarray(fp[f"{grp_name}/CI"][:], np.float32)
self.tmp[ckey]["scan_point_x"] = np.asarray(
fp[f"{grp_name}/X Position"][:] * self.tmp[ckey]["s_x"] + 0., np.float32)
self.tmp[ckey]["scan_point_y"] = np.asarray(
fp[f"{grp_name}/Y Position"][:] * self.tmp[ckey]["s_y"] + 0., np.float32)
# normalize pixel coordinates to physical positions even though the origin can still dangle somewhere
# expected is order on x is first all possible x values while y == 0
# followed by as many copies of this linear sequence for each y increment
# tricky situation is that for one version pixel coordinates while in another case
# calibrated e.g. micron coordinates are reported that is in the first case px needs
# multiplication with step size in the other one must not multiple with step size
# as the step size has already been accounted for by the tech partner when writing!
if self.version["schema_version"] in ["OIM Analysis 8.5.1002 x64 [07-17-20]"]:
print(f"{self.version['schema_version']}, tech partner accounted for calibration")
self.tmp[ckey]["scan_point_x"] \
= np.asarray(fp[f"{grp_name}/X Position"][:], np.float32)
self.tmp[ckey]["scan_point_y"] \
= np.asarray(fp[f"{grp_name}/Y Position"][:], np.float32)
else:
print(f"{self.version['schema_version']}, parser has to do the calibration")
self.tmp[ckey]["scan_point_x"] = np.asarray(
fp[f"{grp_name}/X Position"][:] * self.tmp[ckey]["s_x"], np.float32)
self.tmp[ckey]["scan_point_y"] = np.asarray(
fp[f"{grp_name}/Y Position"][:] * self.tmp[ckey]["s_y"], np.float32)
print(f"xmin {np.min(self.tmp[ckey]['scan_point_x'])}," \
f"xmax {np.max(self.tmp[ckey]['scan_point_x'])}," \
f"ymin {np.min(self.tmp[ckey]['scan_point_y'])}," \
f"ymax {np.max(self.tmp[ckey]['scan_point_y'])}")
4 changes: 4 additions & 0 deletions pynxtools/dataconverter/readers/em/subparsers/hfive_oxford.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,6 +232,10 @@ def parse_and_normalize_slice_ebsd_data(self, fp, ckey: str):
# no normalization needed, also in NXem_ebsd the null model notIndexed is phase_identifier 0
self.tmp[ckey]["phase_id"] = np.asarray(fp[f"{grp_name}/Phase"], np.int32)

# normalize pixel coordinates to physical positions even though the origin can still dangle somewhere
# expected is order on x is first all possible x values while y == 0
# followed by as many copies of this linear sequence for each y increment
# no action needed Oxford reports already the pixel coordinate multiplied by step
# X, no, H5T_NATIVE_FLOAT, (size, 1), X position of each pixel in micrometers (origin: top left corner)
self.tmp[ckey]["scan_point_x"] = np.asarray(fp[f"{grp_name}/X"], np.float32)
# inconsistency f32 in file although specification states float
Expand Down
62 changes: 40 additions & 22 deletions pynxtools/dataconverter/readers/em/subparsers/nxs_hfive.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,12 @@ def process_into_template(self, inp: dict, template: dict) -> dict:
self.process_roi_ebsd_maps(inp, template)
return template

def get_named_axis(self, inp: dict, dim_name: str):
    """Return calibrated positions (np.float32) along axis *dim_name*.

    Reads the point count ``inp[f"n_{dim_name}"]`` and the step size
    ``inp[f"s_{dim_name}"]`` and returns the sequence
    0, s, 2*s, ..., (n - 1)*s.
    """
    n_points = inp[f"n_{dim_name}"]
    step_size = inp[f"s_{dim_name}"]
    # pixel indices 0 .. n-1 scaled by the (calibrated) step size
    indices = np.linspace(0, n_points - 1, num=n_points, endpoint=True)
    return np.asarray(indices * step_size, np.float32)

def process_roi_overview(self, inp: dict, template: dict) -> dict:
for ckey in inp.keys():
if ckey.startswith("ebsd") and inp[ckey] != {}:
Expand All @@ -180,6 +186,7 @@ def process_roi_overview_ebsd_based(self,
# prfx = f"/roi{roi_id}"
trg = f"/ENTRY[entry{self.entry_id}]/ROI[roi{roi_id}]/ebsd/indexing/DATA[roi]"
template[f"{trg}/title"] = f"Region-of-interest overview image"
template[f"{trg}/@NX_class"] = f"NXdata" # TODO::writer should decorate automatically!
template[f"{trg}/@signal"] = "data"
template[f"{trg}/@axes"] = ["axis_y", "axis_x"]
template[f"{trg}/@AXISNAME_indices[axis_x_indices]"] = np.uint32(0)
Expand All @@ -198,20 +205,23 @@ def process_roi_overview_ebsd_based(self,
raise ValueError(f"{__name__} unable to generate plot for {trg} !")
# 0 is y while 1 is x !
template[f"{trg}/data/@long_name"] = f"Signal"
template[f"{trg}/data/@CLASS"] = "IMAGE" # required by H5Web to plot RGB maps
template[f"{trg}/data/@CLASS"] = "IMAGE" # required H5Web, RGB map
template[f"{trg}/data/@IMAGE_VERSION"] = f"1.2"
template[f"{trg}/data/@SUBCLASS_VERSION"] = np.int64(15)

scan_unit = inp["s_unit"]
if scan_unit == "um":
scan_unit = "µm"
template[f"{trg}/AXISNAME[axis_x]"] \
= {"compress": np.asarray(inp["scan_point_x"], np.float32), "strength": 1}
= {"compress": self.get_named_axis(inp, "x"), "strength": 1}
template[f"{trg}/AXISNAME[axis_x]/@long_name"] \
= f"Coordinate along x-axis ({inp['s_unit']})"
template[f"{trg}/AXISNAME[axis_x]/@units"] = f"{inp['s_unit']}"
= f"Coordinate along x-axis ({scan_unit})"
template[f"{trg}/AXISNAME[axis_x]/@units"] = f"{scan_unit}"
template[f"{trg}/AXISNAME[axis_y]"] \
= {"compress": np.asarray(inp["scan_point_y"], np.float32), "strength": 1}
= {"compress": self.get_named_axis(inp, "y"), "strength": 1}
template[f"{trg}/AXISNAME[axis_y]/@long_name"] \
= f"Coordinate along y-axis ({inp['s_unit']})"
template[f"{trg}/AXISNAME[axis_y]/@units"] = f"{inp['s_unit']}"
= f"Coordinate along y-axis ({scan_unit})"
template[f"{trg}/AXISNAME[axis_y]/@units"] = f"{scan_unit}"
return template

def process_roi_ebsd_maps(self, inp: dict, template: dict) -> dict:
Expand All @@ -227,14 +237,19 @@ def process_roi_xmap(self, inp: dict, roi_id: int, template: dict) -> dict:
"""Process crystal orientation map from normalized orientation data."""
# for NeXus to create a default representation of the EBSD map to explore
self.xmap = None
self.axis_x = None
self.axis_y = None
if np.max((inp["n_x"], inp["n_y"])) < HFIVE_WEB_MAXIMUM_RGB:
# can use the map discretization as is
coordinates, _ = create_coordinate_arrays(
(inp["n_y"], inp["n_x"]), (inp["s_y"], inp["s_x"]))
xaxis = coordinates["x"]
yaxis = coordinates["y"]
print(f"xmi {np.min(xaxis)}, xmx {np.max(xaxis)}, ymi {np.min(yaxis)}, ymx {np.max(yaxis)}")
print(f"xmi {np.min(xaxis)}, xmx {np.max(xaxis)}, " \
f"ymi {np.min(yaxis)}, ymx {np.max(yaxis)}")
del coordinates
self.axis_x = self.get_named_axis(inp, "x")
self.axis_y = self.get_named_axis(inp, "y")
else:
raise ValueError(f"Downsampling for too large EBSD maps is currently not supported !")
# need to regrid to downsample too large maps
Expand Down Expand Up @@ -359,6 +374,7 @@ def process_roi_phase_inverse_pole_figures(self,
mpp = f"{trg}/DATA[map]"
template[f"{mpp}/title"] \
= f"Inverse pole figure {projection_directions[idx][0]} {phase_name}"
template[f"{mpp}/@NX_class"] = f"NXdata" # TODO::writer should decorate automatically!
template[f"{mpp}/@signal"] = "data"
template[f"{mpp}/@axes"] = ["axis_y", "axis_x"]
template[f"{mpp}/@AXISNAME_indices[axis_x_indices]"] = np.uint32(0)
Expand All @@ -368,22 +384,24 @@ def process_roi_phase_inverse_pole_figures(self,
template[f"{mpp}/DATA[data]/@IMAGE_VERSION"] = "1.2"
template[f"{mpp}/DATA[data]/@SUBCLASS_VERSION"] = np.int64(15)

template[f"{mpp}/AXISNAME[axis_x]"] \
= {"compress": np.asarray(self.xmap.x, np.float32), "strength": 1}
scan_unit = self.xmap.scan_unit
if scan_unit == "um":
scan_unit = "µm"
template[f"{mpp}/AXISNAME[axis_x]"] = {"compress": self.axis_x, "strength": 1}
template[f"{mpp}/AXISNAME[axis_x]/@long_name"] \
= f"Coordinate along x-axis ({self.xmap.scan_unit})"
template[f"{mpp}/AXISNAME[axis_x]/@units"] = f"{self.xmap.scan_unit}"
template[f"{mpp}/AXISNAME[axis_y]"] \
= {"compress": np.asarray(self.xmap.y, np.float32), "strength": 1}
= f"Coordinate along x-axis ({scan_unit})"
template[f"{mpp}/AXISNAME[axis_x]/@units"] = f"{scan_unit}"
template[f"{mpp}/AXISNAME[axis_y]"] = {"compress": self.axis_y, "strength": 1}
template[f"{mpp}/AXISNAME[axis_y]/@long_name"] \
= f"Coordinate along y-axis ({self.xmap.scan_unit})"
template[f"{mpp}/AXISNAME[axis_y]/@units"] = f"{self.xmap.scan_unit}"
= f"Coordinate along y-axis ({scan_unit})"
template[f"{mpp}/AXISNAME[axis_y]/@units"] = f"{scan_unit}"

# add the IPF color map legend/key
lgd = f"{trg}/DATA[legend]"
template[f"{lgd}/title"] \
= f"Inverse pole figure {projection_directions[idx][0]} {phase_name}"
# template[f"{trg}/title"] = f"Inverse pole figure color key with SST"
template[f"{lgd}/@NX_class"] = f"NXdata" # TODO::writer should decorate automatically!
template[f"{lgd}/@signal"] = "data"
template[f"{lgd}/@axes"] = ["axis_y", "axis_x"]
template[f"{lgd}/@AXISNAME_indices[axis_x_indices]"] = np.uint32(0)
Expand All @@ -394,17 +412,17 @@ def process_roi_phase_inverse_pole_figures(self,
template[f"{lgd}/data/@SUBCLASS_VERSION"] = np.int64(15)

template[f"{lgd}/AXISNAME[axis_x]"] \
= {"compress": np.asarray(np.linspace(1,
np.shape(img)[0],
num=np.shape(img)[0],
= {"compress": np.asarray(np.linspace(0,
np.shape(img)[1] - 1,
num=np.shape(img)[1],
endpoint=True), np.uint32),
"strength": 1}
template[f"{lgd}/AXISNAME[axis_x]/@long_name"] = "Pixel along x-axis"
template[f"{lgd}/AXISNAME[axis_x]/@units"] = "px"
template[f"{lgd}/AXISNAME[axis_y]"] \
= {"compress": np.asarray(np.linspace(1,
np.shape(img)[1],
num=np.shape(img)[1],
= {"compress": np.asarray(np.linspace(0,
np.shape(img)[0] - 1,
num=np.shape(img)[0],
endpoint=True), np.uint32),
"strength": 1}
template[f"{lgd}/AXISNAME[axis_y]/@long_name"] = "Pixel along y-axis"
Expand Down
3 changes: 2 additions & 1 deletion test.all.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@

# Examples="207_2081.edaxh5"
# Examples="173_0057.h5oina"
Examples="229_2097.oh5"
# oxford, bruker, britton, edax old non-calibrated, edax old calibrated, apex
Examples="173_0057.h5oina 130_0003.h5 088_0009.h5 116_0014.h5 229_2097.oh5 207_2081.edaxh5"
for example in $Examples; do
echo $example
dataconverter --reader em --nxdl NXroot --input-file $example --output debug.$example.nxs 1>stdout.$example.nxs.txt 2>stderr.$example.nxs.txt
Expand Down

0 comments on commit 0a2307f

Please sign in to comment.