change writing of data field in mpes template
lukaspie committed Nov 30, 2023
1 parent 8197fd1 commit c8584c9
Showing 6 changed files with 300 additions and 141 deletions.
202 changes: 112 additions & 90 deletions pynxtools/dataconverter/readers/xps/config/config_sle_specs.json

Large diffs are not rendered by default.

53 changes: 40 additions & 13 deletions pynxtools/dataconverter/readers/xps/sle/sle_specs.py
@@ -210,33 +210,60 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
self._xps_dict[f'{path_map["analyser"]}/name'] = spectrum["devices"][0]
self._xps_dict[f'{path_map["source"]}/name'] = spectrum["devices"][1]

# Create keys for writing to data and detector
entry = construct_entry_name(region_parent)
self._xps_dict["data"][entry] = xr.Dataset()

scan_key = construct_data_key(spectrum)
detector_data_key_child = construct_detector_data_key(spectrum)

energy = np.array(spectrum["data"]["x"])

channels = [key for key in spectrum["data"] if "cps_ch_" in key]
# If multiple spectra exist for this entry, only create a new
# xr.Dataset the first time the entry occurs.
if entry not in self._xps_dict["data"]:
self._xps_dict["data"][entry] = xr.Dataset()

for channel in channels:
ch_no = channel.rsplit("_")[-1]
channel_key = f"{scan_key}_chan_{ch_no}"
cps = np.array(spectrum["data"][channel])
# Write averaged cycle data to 'data'.
all_scan_data = [
value
for key, value in self._xps_dict["data"][entry].items()
if scan_key.split("_")[0] in key
]
averaged_scans = np.mean(all_scan_data, axis=0)
if averaged_scans.size == 1:
# on first scan in cycle
averaged_scans = spectrum["data"]["cps_calib"]

self._xps_dict["data"][entry][channel_key] = xr.DataArray(
data=cps, coords={"energy": energy}
try:
self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={"energy": energy},
)
# This error occurs if the same region is scanned twice with
# different step sizes.
except ValueError:
pass

# Write scan data to 'data'.
self._xps_dict["data"][entry][scan_key] = xr.DataArray(
data=spectrum["data"]["cps_calib"], coords={"energy": energy}
)

detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

self._xps_dict[detector_data_key] = spectrum["data"]["cps_calib"]
channels = [key for key in spectrum["data"] if "cps_ch_" in key]
for channel in channels:
ch_no = channel.rsplit("_")[-1]
channel_key = f"{scan_key}_chan_{ch_no}"
detector_data_key = (
f"{path_map['detector']}/{detector_data_key_child}"
f"_channels_Channel_{ch_no}/counts"
)
cps = np.array(spectrum["data"][channel])

# Write raw data to detector.
self._xps_dict[detector_data_key] = spectrum["data"]["cps_calib"]
# Write channel data to 'data'.
self._xps_dict["data"][entry][channel_key] = xr.DataArray(
data=cps, coords={"energy": energy}
)

class SleProdigyParser(ABC):
"""
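The averaging step introduced above (and repeated in the readers below) relies on np.mean over an empty list collapsing to a single NaN value to detect the first scan written for an entry. A minimal standalone sketch of that pattern, with purely illustrative arrays that are not taken from the reader:

import numpy as np

current_scan = np.array([10.0, 12.0, 11.0])  # counts of the scan being written
previous_scans = []  # nothing stored for this entry yet

# np.mean of an empty list is a single NaN (numpy emits a RuntimeWarning),
# so averaged_scans.size == 1 flags the first scan of the entry.
averaged_scans = np.mean(previous_scans, axis=0)
if averaged_scans.size == 1:
    averaged_scans = current_scan

# Once earlier scans exist, the mean is taken point by point.
previous_scans = [np.array([10.0, 12.0, 11.0]), np.array([14.0, 12.0, 13.0])]
averaged_scans = np.mean(previous_scans, axis=0)  # -> array([12., 12., 12.])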
45 changes: 35 additions & 10 deletions pynxtools/dataconverter/readers/xps/txt/txt_scienta.py
@@ -166,26 +166,51 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
mpes_key = spectrum_key
self._xps_dict[f"{root}/{mpes_key}"] = spectrum[spectrum_key]

# Create keys for writing to data and detector
entry = construct_entry_name(region_parent)
self._xps_dict["data"][entry] = xr.Dataset()

scan_key = construct_data_key(spectrum)
detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

# Write raw data to detector.
self._xps_dict[detector_data_key] = spectrum["data"]["y"]

# If multiple spectra exist for this entry, only create a new
# xr.Dataset the first time the entry occurs.
if entry not in self._xps_dict["data"]:
self._xps_dict["data"][entry] = xr.Dataset()

energy = np.array(spectrum["data"]["x"])
intensity = spectrum["data"]["y"]

channel_key = f"{scan_key}_chan_0"
self._xps_dict["data"][entry][channel_key] = xr.DataArray(
data=spectrum["data"]["y"], coords={"energy": energy}
# Write to data in order: scan, cycle, channel

# Write averaged cycle data to 'data'.
all_scan_data = [
value
for key, value in self._xps_dict["data"][entry].items()
if scan_key.split("_")[0] in key
]
averaged_scans = np.mean(all_scan_data, axis=0)
if averaged_scans.size == 1:
# on first scan in cycle
averaged_scans = intensity

self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={"energy": energy},
)

# Write scan data to 'data'.
self._xps_dict["data"][entry][scan_key] = xr.DataArray(
data=spectrum["data"]["y"], coords={"energy": energy}
data=intensity, coords={"energy": energy}
)

detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

self._xps_dict[detector_data_key] = spectrum["data"]["y"]
# Write channel data to 'data'.
channel_key = f"{scan_key}_chan_0"
self._xps_dict["data"][entry][channel_key] = xr.DataArray(
data=intensity, coords={"energy": energy}
)


class ScientaTxtHelper:
34 changes: 27 additions & 7 deletions pynxtools/dataconverter/readers/xps/txt/txt_vamas_export.py
@@ -161,21 +161,41 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
except KeyError:
pass

# Create keys for writing to data and detector
entry = construct_entry_name(region_parent)
self._xps_dict["data"][entry] = xr.Dataset()

scan_key = construct_data_key(spectrum)
detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

energy = np.array(spectrum["data"]["binding_energy"])
intensity = np.array(spectrum["data"]["intensity"])

# If multiple spectra exist for this entry, only create a new
# xr.Dataset the first time the entry occurs.
if entry not in self._xps_dict["data"]:
self._xps_dict["data"][entry] = xr.Dataset()

# Write averaged cycle data to 'data'.
all_scan_data = [
value
for key, value in self._xps_dict["data"][entry].items()
if scan_key.split("_")[0] in key
]
averaged_scans = np.mean(all_scan_data, axis=0)
if averaged_scans.size == 1:
# on first scan in cycle
averaged_scans = intensity

self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={"energy": energy},
)

self._xps_dict["data"][entry][scan_key] = xr.DataArray(
data=spectrum["data"]["intensity"], coords={"energy": energy}
data=intensity, coords={"energy": energy}
)

detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

self._xps_dict[detector_data_key] = spectrum["data"]["intensity"]
self._xps_dict[detector_data_key] = intensity


class TextParser(ABC): # pylint: disable=too-few-public-methods
89 changes: 73 additions & 16 deletions pynxtools/dataconverter/readers/xps/vms/vamas.py
@@ -22,6 +22,7 @@
from copy import deepcopy
import datetime
from abc import ABC, abstractmethod
from itertools import groupby
import xarray as xr
import numpy as np

@@ -40,6 +41,9 @@ class VamasMapper(XPSMapper):
Class for restructuring .txt data file from
Vamas format into python dictionary.
"""

config_file = "config_vms.json"

def __init__(self):
self.file = None
self.parsers = [
@@ -175,32 +179,51 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
mpes_key = spectrum_key
self._xps_dict[f"{root}/{mpes_key}"] = spectrum[spectrum_key]

# Create keys for writing to data and detector
entry = construct_entry_name(region_parent)
self._xps_dict["data"][entry] = xr.Dataset()

scan_key = construct_data_key(spectrum)
detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

energy = np.array(spectrum["data"]["x"])
intensity = np.array(spectrum["data"]["y"])

channels = [key for key in spectrum["data"] if "cps_ch_" in key]

for channel in channels:
ch_no = channel.rsplit("_")[-1]
channel_key = f"{scan_key}_chan_{ch_no}"
cps = np.array(spectrum["data"][channel])
if entry not in self._xps_dict["data"]:
self._xps_dict["data"][entry] = xr.Dataset()

self._xps_dict["data"][entry][channel_key] = xr.DataArray(
data=cps, coords={"energy": energy}
)
# Write averaged cycle data to 'data'.
all_scan_data = [
np.array(value)
for key, value in self._xps_dict["data"][entry].items()
if scan_key.split("_")[0] in key
]

# Write averaged cycle data to 'data'.
averaged_scans = np.mean(all_scan_data, axis=0)
if averaged_scans.size == 1:
# on first scan in cycle
averaged_scans = intensity
if entry == "3 S1110, UHV, RT, Epass = 30 eV__VB":
self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={"energy": energy},
)

try:
self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={"energy": energy},
)
except ValueError:
pass

# Write scan data to 'data'.
self._xps_dict["data"][entry][scan_key] = xr.DataArray(
data=spectrum["data"]["cps_calib"], coords={"energy": energy}
data=intensity, coords={"energy": energy}
)

detector_data_key_child = construct_detector_data_key(spectrum)
detector_data_key = f'{path_map["detector"]}/{detector_data_key_child}/counts'

self._xps_dict[detector_data_key] = spectrum["data"]["cps_calib"]
# Write raw intensities to 'detector'.
self._xps_dict[detector_data_key] = intensity


class VamasParser(ABC):
@@ -471,6 +494,38 @@ def _parse_map_block(self):
"""
return Block()

def _get_scan_numbers_for_spectra(self, spectra):
"""
For a flat list of spectra, group by group name and spectrum
type and iteratively assign scan numbers.
Parameters
----------
spectra : list
List of dicts with each dict containing data and metadata
for one spectrum.
Returns
-------
flattened_spectra : list
Same list of dicts, but each spectrum gets a scan number.
"""

grouped_spectra = [list(y) for x,y in groupby(
sorted(spectra,
key=lambda x: (x['group_name'],x['spectrum_type'])),
lambda x: (x['group_name'],x['spectrum_type']))]

for group in grouped_spectra:
for i, spectrum in enumerate(group):
spectrum["scan_no"] = i

flattened_spectra = [spectrum for group in grouped_spectra for spectrum in group]

return flattened_spectra


def build_list(self):
"""
Construct a list of dictionaries from the Vamas objects
@@ -568,6 +623,8 @@ def build_list(self):
spec_dict.update(settings)
spectra += [spec_dict]

spectra = self._get_scan_numbers_for_spectra(spectra)

return spectra


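A hypothetical illustration of the _get_scan_numbers_for_spectra method added above, assuming spectrum dicts that carry only the two keys used for grouping (the group and spectrum names are made up):

spectra = [
    {"group_name": "survey", "spectrum_type": "Fe2p"},
    {"group_name": "survey", "spectrum_type": "O1s"},
    {"group_name": "survey", "spectrum_type": "Fe2p"},
]
# After _get_scan_numbers_for_spectra(spectra), the two Fe2p spectra carry
# scan_no 0 and 1, the single O1s spectrum carries scan_no 0, and the list
# is returned grouped by (group_name, spectrum_type).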
18 changes: 13 additions & 5 deletions pynxtools/dataconverter/readers/xps/xy/xy_specs.py
@@ -47,6 +47,9 @@ class XyMapperSpecs(XPSMapper):
Class for restructuring .xy data file from
Specs vendor into python dictionary.
"""

config_file = "config_specs_xy.json"

def __init__(self):
super().__init__()
self.write_channels_to_data = True
@@ -183,15 +186,16 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):

x_units = spectrum["x_units"]
energy = np.array(spectrum["data"]["x"])
intensity = np.array(spectrum["data"]["y"])

if entry not in self._xps_dict["data"]:
self._xps_dict["data"][entry] = xr.Dataset()

# Write raw data to detector.
self._xps_dict[detector_data_key] = spectrum["data"]["y"]
self._xps_dict[detector_data_key] = intensity

if not self.parser.export_settings["Separate Channel Data"]:
averaged_channels = spectrum["data"]["y"]
averaged_channels = intensity
else:
all_channel_data = [
value
@@ -201,7 +205,7 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
averaged_channels = np.mean(all_channel_data, axis=0)

if not self.parser.export_settings["Separate Scan Data"]:
averaged_scans = spectrum["data"]["y"]
averaged_scans = intensity
else:
all_scan_data = [
value
@@ -210,12 +214,15 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
]
averaged_scans = np.mean(all_scan_data, axis=0)

# Writing order: scan, cycle, channel data
# Write to data in order: scan, cycle, channel

# Write averaged cycle data to 'data'.
self._xps_dict["data"][entry][scan_key.split("_")[0]] = xr.DataArray(
data=averaged_scans,
coords={x_units: energy},
)
if self.parser.export_settings["Separate Scan Data"]:
# Write average cycle data to 'data'.
self._xps_dict["data"][entry][scan_key] = xr.DataArray(
data=averaged_channels,
coords={x_units: energy},
@@ -225,10 +232,11 @@ def _update_xps_dict_with_spectrum(self, spectrum, key_map):
self.parser.export_settings["Separate Channel Data"]
and self.write_channels_to_data
):
# Write channel data to 'data'.
channel_no = spectrum["channel_no"]
self._xps_dict["data"][entry][
f"{scan_key}_chan{channel_no}"
] = xr.DataArray(data=spectrum["data"]["y"], coords={x_units: energy})
] = xr.DataArray(data=intensity, coords={x_units: energy})


class XyProdigyParser: # pylint: disable=too-few-public-methods
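Across these readers, each entry's xr.Dataset now holds three kinds of keys derived from the scan key: the averaged cycle, the individual scan, and the individual channels. A hypothetical sketch of that layout, assuming construct_data_key(spectrum) returned "cycle0_scan0" (the real format comes from the helper and may differ):

scan_key = "cycle0_scan0"  # hypothetical output of construct_data_key
cycle_key = scan_key.split("_")[0]  # "cycle0": averaged data of the whole cycle
channel_key = f"{scan_key}_chan0"  # "cycle0_scan0_chan0": one detector channel
print(cycle_key, scan_key, channel_key)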
