From 7e1cfd960e4ff1a955f8921529529d557c1d9780 Mon Sep 17 00:00:00 2001 From: mkuehbach Date: Mon, 4 Dec 2023 13:00:41 +0100 Subject: [PATCH] Manual merging in of fairmat 8620c4e into the em_refactoring branch to prepare with the resolving of possible merge conflicts --- .github/workflows/publish.yml | 1 + .github/workflows/pylint.yml | 4 +- .github/workflows/pytest.yml | 12 +- .gitignore | 4 +- .vscode/settings.json | 7 + MANIFEST.in | 4 +- README.md | 13 +- dev-requirements.txt | 20 +- examples/apm/Write.NXapm.Example.1.ipynb | 53 +- examples/apm/apm.oasis.specific.yaml | 1 + examples/apm/eln_data_apm.yaml | 142 +- examples/ellipsometry/eln_data.yaml | 5 +- .../em_nion/Write.NXem_nion.Example.1.ipynb | 14 +- .../em_om/Write.NXem_ebsd.Example.1.ipynb | 16 +- .../em_spctrscpy/Write.NXem.Example.1.ipynb | 8 +- examples/json_map/README.md | 36 + examples/json_map/merge_copied.mapping.json | 35 + examples/json_map/merge_linked.mapping.json | 25 + examples/sts/README.md | 32 + pynxtools/__init__.py | 68 + pynxtools/_build_wrapper.py | 71 + pynxtools/dataconverter/README.md | 21 +- pynxtools/dataconverter/convert.py | 247 ++- pynxtools/dataconverter/hdfdict.py | 11 +- pynxtools/dataconverter/helpers.py | 149 +- .../apm_deployment_specifics_to_nx_map.py | 52 + .../apm/map_concepts/apm_eln_to_nx_map.py | 109 + pynxtools/dataconverter/readers/apm/reader.py | 32 +- ...lots.py => apm_create_nx_default_plots.py} | 0 ...ase_selector.py => apm_define_io_cases.py} | 38 +- ...data.py => apm_generate_synthetic_data.py} | 2 +- .../readers/apm/utils/apm_generic_eln_io.py | 409 ---- .../utils/apm_load_deployment_specifics.py | 57 + .../readers/apm/utils/apm_load_generic_eln.py | 175 ++ ...{apm_ranging_io.py => apm_load_ranging.py} | 0 ...ction_io.py => apm_load_reconstruction.py} | 0 .../apm/utils/apm_parse_composition_table.py | 179 ++ .../dataconverter/readers/ellips/reader.py | 34 +- .../{concepts => map_concepts}/README.md | 0 .../swift_display_items_to_nx.py} | 0 .../swift_eln_to_nx_map.py} | 0 .../swift_to_nx_image_ang_space.py} | 0 .../swift_to_nx_image_real_space.py} | 0 .../swift_to_nx_spectrum_eels.py} | 0 .../dataconverter/readers/em_nion/reader.py | 4 +- .../{versioning.py => em_nion_versioning.py} | 0 ...e_selector.py => swift_define_io_cases.py} | 0 ...xes.py => swift_generate_dimscale_axes.py} | 2 +- ...ic_eln_io.py => swift_load_generic_eln.py} | 8 +- .../utils/swift_zipped_project_parser.py | 12 +- .../readers/em_om/utils/image_transform.py | 9 +- .../dataconverter/readers/example/reader.py | 7 +- .../dataconverter/readers/json_map/README.md | 53 +- .../dataconverter/readers/json_map/reader.py | 75 +- .../dataconverter/readers/mpes/reader.py | 39 +- .../readers/rii_database/reader.py | 20 +- .../map_concepts/mapping_functors.py} | 0 .../readers/shared/shared_utils.py | 12 +- .../readers/transmission/reader.py | 3 +- pynxtools/dataconverter/readers/utils.py | 22 +- pynxtools/dataconverter/readers/xrd/README.md | 40 + .../dataconverter/readers/xrd/__init__.py | 15 + pynxtools/dataconverter/readers/xrd/config.py | 117 ++ pynxtools/dataconverter/readers/xrd/reader.py | 176 ++ .../dataconverter/readers/xrd/xrd_helper.py | 293 +++ .../dataconverter/readers/xrd/xrd_parser.py | 448 ++++ pynxtools/dataconverter/template.py | 32 +- pynxtools/dataconverter/writer.py | 40 +- pynxtools/eln_mapper/README.md | 19 + .../apm_utils.py => eln_mapper/__init__.py} | 10 - pynxtools/eln_mapper/eln.py | 189 ++ pynxtools/eln_mapper/eln_mapper.py | 75 + pynxtools/eln_mapper/scheme_eln.py | 281 +++ 
pynxtools/nexus/nexus.py | 44 +- pynxtools/nexus/nxdl_utils.py | 3 + pyproject.toml | 20 +- tests/data/dataconverter/NXtest.nxdl.xml | 3 + .../nxapm.schema.archive.yaml | 422 ++-- .../readers/ellips/eln_data.yaml | 3 - .../dataconverter/readers/json_map/data.json | 3 +- .../readers/json_map/data.mapping.json | 3 +- .../readers/mpes/Ref_nexus_mpes.log | 1869 ++++++++++++----- .../readers/mpes/config_file.json | 15 +- .../readers/xrd/ACZCTS_5-60_181.xrdml | 106 + tests/data/eln_mapper/eln.yaml | 103 + .../data/eln_mapper/mpes.scheme.archive.yaml | 537 +++++ tests/data/nexus/NXtest2.nxdl.xml | 455 ++++ tests/data/nexus/Ref_nexus_test.log | 551 +++-- tests/dataconverter/test_convert.py | 4 +- tests/dataconverter/test_helpers.py | 161 +- tests/dataconverter/test_readers.py | 29 + tests/eln_mapper/__init__.py | 16 + tests/eln_mapper/test_eln_mapper.py | 107 + tests/nexus/test_nexus.py | 19 +- tests/nexus/test_version.py | 16 + 95 files changed, 6832 insertions(+), 1744 deletions(-) create mode 100644 examples/apm/apm.oasis.specific.yaml create mode 100644 examples/json_map/README.md create mode 100644 examples/json_map/merge_copied.mapping.json create mode 100644 examples/json_map/merge_linked.mapping.json create mode 100644 examples/sts/README.md create mode 100644 pynxtools/_build_wrapper.py create mode 100644 pynxtools/dataconverter/readers/apm/map_concepts/apm_deployment_specifics_to_nx_map.py create mode 100644 pynxtools/dataconverter/readers/apm/map_concepts/apm_eln_to_nx_map.py rename pynxtools/dataconverter/readers/apm/utils/{apm_nexus_plots.py => apm_create_nx_default_plots.py} (100%) rename pynxtools/dataconverter/readers/apm/utils/{apm_use_case_selector.py => apm_define_io_cases.py} (65%) rename pynxtools/dataconverter/readers/apm/utils/{apm_example_data.py => apm_generate_synthetic_data.py} (99%) delete mode 100644 pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py create mode 100644 pynxtools/dataconverter/readers/apm/utils/apm_load_deployment_specifics.py create mode 100644 pynxtools/dataconverter/readers/apm/utils/apm_load_generic_eln.py rename pynxtools/dataconverter/readers/apm/utils/{apm_ranging_io.py => apm_load_ranging.py} (100%) rename pynxtools/dataconverter/readers/apm/utils/{apm_reconstruction_io.py => apm_load_reconstruction.py} (100%) create mode 100644 pynxtools/dataconverter/readers/apm/utils/apm_parse_composition_table.py rename pynxtools/dataconverter/readers/em_nion/{concepts => map_concepts}/README.md (100%) rename pynxtools/dataconverter/readers/em_nion/{concepts/swift_display_items_to_nx_concepts.py => map_concepts/swift_display_items_to_nx.py} (100%) rename pynxtools/dataconverter/readers/em_nion/{concepts/generic_eln_mapping.py => map_concepts/swift_eln_to_nx_map.py} (100%) rename pynxtools/dataconverter/readers/em_nion/{concepts/nx_image_ang_space.py => map_concepts/swift_to_nx_image_ang_space.py} (100%) rename pynxtools/dataconverter/readers/em_nion/{concepts/nx_image_real_space.py => map_concepts/swift_to_nx_image_real_space.py} (100%) rename pynxtools/dataconverter/readers/em_nion/{concepts/nx_spectrum_eels.py => map_concepts/swift_to_nx_spectrum_eels.py} (100%) rename pynxtools/dataconverter/readers/em_nion/utils/{versioning.py => em_nion_versioning.py} (100%) rename pynxtools/dataconverter/readers/em_nion/utils/{use_case_selector.py => swift_define_io_cases.py} (100%) rename pynxtools/dataconverter/readers/em_nion/utils/{swift_dimscale_axes.py => swift_generate_dimscale_axes.py} (96%) rename 
pynxtools/dataconverter/readers/em_nion/utils/{em_generic_eln_io.py => swift_load_generic_eln.py} (95%) rename pynxtools/dataconverter/readers/{em_nion/concepts/swift_handle_nx_concepts.py => shared/map_concepts/mapping_functors.py} (100%) create mode 100644 pynxtools/dataconverter/readers/xrd/README.md create mode 100644 pynxtools/dataconverter/readers/xrd/__init__.py create mode 100644 pynxtools/dataconverter/readers/xrd/config.py create mode 100644 pynxtools/dataconverter/readers/xrd/reader.py create mode 100644 pynxtools/dataconverter/readers/xrd/xrd_helper.py create mode 100644 pynxtools/dataconverter/readers/xrd/xrd_parser.py create mode 100644 pynxtools/eln_mapper/README.md rename pynxtools/{dataconverter/readers/apm/utils/apm_utils.py => eln_mapper/__init__.py} (59%) create mode 100644 pynxtools/eln_mapper/eln.py create mode 100644 pynxtools/eln_mapper/eln_mapper.py create mode 100644 pynxtools/eln_mapper/scheme_eln.py create mode 100644 tests/data/dataconverter/readers/xrd/ACZCTS_5-60_181.xrdml create mode 100644 tests/data/eln_mapper/eln.yaml create mode 100644 tests/data/eln_mapper/mpes.scheme.archive.yaml create mode 100644 tests/data/nexus/NXtest2.nxdl.xml create mode 100644 tests/eln_mapper/__init__.py create mode 100644 tests/eln_mapper/test_eln_mapper.py create mode 100644 tests/nexus/test_version.py diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b222fc5cf..24b408529 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -19,6 +19,7 @@ jobs: steps: - uses: actions/checkout@v3 with: + fetch-depth: 0 submodules: recursive - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 8ef2b0e10..f1b418fbe 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -7,10 +7,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.10 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: "3.10" - name: Install dependencies run: | git submodule sync --recursive diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 575788512..ce6cd000e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -13,12 +13,14 @@ jobs: pytest: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - python_version: ["3.8", "3.9", "3.10"] + python_version: ["3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v3 with: + fetch-depth: 0 submodules: recursive - name: Set up Python ${{ matrix.python_version }} uses: actions/setup-python@v4 @@ -26,20 +28,16 @@ jobs: python-version: ${{ matrix.python_version }} - name: Install dependencies run: | - git submodule sync --recursive - git submodule update --init --recursive --jobs=4 python -m pip install --upgrade pip python -m pip install coverage coveralls - name: Install package run: | - python -m pip install --no-deps . 
- - name: Install dev requirements - run: | - python -m pip install -r dev-requirements.txt + pip install ".[dev]" - name: Test with pytest run: | coverage run -m pytest -sv --show-capture=no tests - name: Submit to coveralls + continue-on-error: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | diff --git a/.gitignore b/.gitignore index 4b1dad0c2..abaa80d10 100644 --- a/.gitignore +++ b/.gitignore @@ -88,6 +88,8 @@ cover/ # Django stuff: *.log +!tests/data/dataconverter/readers/mpes/Ref_nexus_mpes.log +!tests/data/nexus/Ref_nexus_test.log local_settings.py db.sqlite3 db.sqlite3-journal @@ -203,7 +205,7 @@ build/ nexusparser.egg-info/PKG-INFO .python-version -# reader specific custom settings +# em-reader-specific custom settings *.h5 *.oh5 *.edaxh5 diff --git a/.vscode/settings.json b/.vscode/settings.json index 299e6fb07..94dada964 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -23,6 +23,13 @@ "pynxtools", "tests" ], + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.formatOnSave": false, + "editor.codeActionsOnSave": { + "source.organizeImports": false + } + }, "python.testing.pytestArgs": ["tests"], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true diff --git a/MANIFEST.in b/MANIFEST.in index 0e91894ff..3c768c9ff 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,6 @@ recursive-include pynxtools/definitions/base_classes/ *.xml recursive-include pynxtools/definitions/applications/ *.xml recursive-include pynxtools/definitions/contributed_definitions/ *.xml -include pynxtools/definitions/ *.xsd +include pynxtools/definitions/*.xsd +include pynxtools/nexus-version.txt +include pynxtools/definitions/NXDL_VERSION \ No newline at end of file diff --git a/README.md b/README.md index b034fddb7..d121714ce 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ It allows to develop ontologies and to create ontological instances based on the # Installation -It is recommended to use python 3.8 with a dedicated virtual environment for this package. +It is recommended to use python 3.10 with a dedicated virtual environment for this package. Learn how to manage [python versions](https://github.com/pyenv/pyenv) and [virtual environments](https://realpython.com/python-virtual-environments-a-primer/). @@ -71,6 +71,17 @@ Especially relevant for developers, there exists a basic test framework written python -m pytest -sv tests ``` +## Run examples + +A number of examples exist which document how the tools can be used. For a standalone +usage convenient jupyter notebooks are available for each tool. To use them jupyter +and related tools have to be installed in the development environment as follows: + +```shell +python -m pip install jupyter +python -m pip install jupyterlab +python -m pip install jupyterlab_h5web +``` # Questions, suggestions? 
To ask further questions, to make suggestions how we can improve these tools, to get advice diff --git a/dev-requirements.txt b/dev-requirements.txt index b8f75ce0b..a4f1ffed2 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -23,6 +23,8 @@ attrs==22.1.0 # requests-cache backcall==0.2.0 # via ipython +blosc2==2.3.2 + # via tables build==0.10.0 # via pip-tools cattrs==22.2.0 @@ -113,7 +115,7 @@ hyperspy==1.7.5 # pyxem idna==3.4 # via requests -ifes-apt-tc-data-modeling==0.0.9 +ifes-apt-tc-data-modeling==0.1 # via pynxtools (pyproject.toml) imageio==2.22.1 # via @@ -192,14 +194,20 @@ matplotlib-scalebar==0.8.1 # via orix mccabe==0.7.0 # via pylint +mergedeep==1.3.4 + # via pynxtools (pyproject.toml) mpmath==1.2.1 # via sympy +msgpack==1.0.7 + # via blosc2 mypy==0.982 # via pynxtools (pyproject.toml) mypy-extensions==0.4.3 # via mypy natsort==8.2.0 # via hyperspy +ndindex==1.7 + # via blosc2 nest-asyncio==1.5.6 # via # ipykernel @@ -238,9 +246,11 @@ numexpr==2.8.3 # via # hyperspy # pyfai + # tables numpy==1.21.6 # via # ase + # blosc2 # dask # diffsims # fabio @@ -271,6 +281,7 @@ numpy==1.21.6 # scipy # silx # sparse + # tables # tifffile # xarray # zarr @@ -293,6 +304,7 @@ packaging==21.3 # pooch # pytest # scikit-image + # tables pandas==1.3.5 # via # ifes-apt-tc-data-modeling @@ -341,6 +353,10 @@ ptyprocess==0.7.0 # via pexpect py==1.11.0 # via pytest +py-cpuinfo==9.0.0 + # via + # blosc2 + # tables pycifrw==4.4.5 # via diffpy-structure pycodestyle==2.9.1 @@ -448,6 +464,8 @@ sympy==1.10.1 # via # hyperspy # radioactivedecay +tables==3.9.2 + # via ifes-apt-tc-data-modeling threadpoolctl==3.1.0 # via scikit-learn tifffile==2021.11.2 diff --git a/examples/apm/Write.NXapm.Example.1.ipynb b/examples/apm/Write.NXapm.Example.1.ipynb index efae762e3..cb11f3c41 100644 --- a/examples/apm/Write.NXapm.Example.1.ipynb +++ b/examples/apm/Write.NXapm.Example.1.ipynb @@ -195,6 +195,20 @@ "### **Step 3c**: Convert the files in the example into an NXapm-compliant NeXus/HDF5 file." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "The deployment_specific YAML file can be used to simplify a specific ELN, e.g. to avoid having to type
\n", + "repetitive information. A typical use case is an OASIS in a laboratory where there is a fixed number of
\n", + "instruments for which many details stay the same, these can be carried over via a *.oasis.specific.yaml file.
\n", + "It is the responsibility of the OASIS maintainer to document and version these specific configurations.
\n", + "We would be happy to learn how this functionality is useful and learn about your feedback to improve
\n", + "this feature to make working with NeXus and ELNs more convenient.
\n", + "
" + ] + }, { "cell_type": "code", "execution_count": null, @@ -204,20 +218,22 @@ "outputs": [], "source": [ "#parser-nexus/tests/data/tools/dataconverter/readers/em_om/\n", + "import numpy as np\n", "eln_data_file_name = [\"eln_data_apm.yaml\"]\n", + "deployment_specific = [\"apm.oasis.specific.yaml\"]\n", "input_recon_file_name = [\"Si.apt\",\n", " \"Si.epos\",\n", " \"Si.pos\",\n", - " \"R31_06365-v02.pos\",\n", - " \"R18_58152-v02.epos\",\n", - " \"70_50_50.apt\"]\n", + "# \"R31_06365-v02.pos\",\n", + "# \"R18_58152-v02.epos\",\n", + "# \"70_50_50.apt\"]\n", "# \"R56_01769-v01.pos\"]\n", "input_range_file_name = [\"Si.RRNG\",\n", " \"Si.RNG\",\n", " \"Si.RNG\",\n", - " \"R31_06365-v02.rrng\",\n", - " \"R31_06365-v02.rrng\",\n", - " \"R31_06365-v02.rrng\"]\n", + "# \"R31_06365-v02.rrng\",\n", + "# \"R31_06365-v02.rrng\",\n", + "# \"R31_06365-v02.rrng\"]\n", "# \"R56_01769.rng.fig.txt\"]\n", "output_file_name = [\"apm.case1.nxs\",\n", " \"apm.case2.nxs\",\n", @@ -225,14 +241,15 @@ " \"apm.case4.nxs\",\n", " \"apm.case5.nxs\",\n", " \"apm.case6.nxs\"]\n", - "for case_id in [0]:\n", + "for case_id in np.arange(0, 3):\n", " ELN = eln_data_file_name[0]\n", + " OASIS = deployment_specific[0]\n", " INPUT_RECON = input_recon_file_name[case_id]\n", " INPUT_RANGE = input_range_file_name[case_id]\n", " OUTPUT = output_file_name[case_id]\n", "\n", - " ! dataconverter --reader apm --nxdl NXapm --input-file $ELN --input-file \\\n", - " $INPUT_RECON --input-file $INPUT_RANGE --output $OUTPUT" + " ! dataconverter --reader apm --nxdl NXapm --input-file $ELN --input-file $OASIS \\\n", + " --input-file $INPUT_RECON --input-file $INPUT_RANGE --output $OUTPUT" ] }, { @@ -258,7 +275,7 @@ "outputs": [], "source": [ "# H5Web(OUTPUT)\n", - "H5Web(\"apm.case1.nxs\")" + "H5Web(\"apm.case3.nxs\")" ] }, { @@ -417,7 +434,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "
\n", + "
\n", "Currently, this functionality requires a Python environment with a newer version of the ase library than the one
\n", "which is used by the installation of pynxtools (which is currently ase==3.19.0). Instead, ase>=3.22.1 should be used.
\n", "The issue with the specific functionalities used in the *create_reconstructed_positions* function is that when using
\n", @@ -448,12 +465,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "
\n", - "This functionality uses recent features of ase which demands an environment that is currently not supported
\n", + "
\n", + "This functionality uses recent features of ase which demands an environment that is not necessarily supported
\n", "by NOMAD OASIS. As the here exemplified settings for this example are configured to represent an environment
\n", - "matching close to NOMAD users who are interested in this developer functionality should do the following:
\n", + "matching one which is close to NOMAD, users who are interested in this dev functionality should do the following:
\n", "Run this example in a standalone environment where ase is upgraded to the latest version and then use
\n", "the generated NeXus files either as is or upload them to NOMAD OASIS.
\n", + "If the above-mentioned cell detects e.g. that a recent version of ase was installed
\n", + "(e.g. >3.22.x) then the code in the following cell can be executed without issues.
\n", "
" ] }, @@ -465,7 +484,7 @@ }, "outputs": [], "source": [ - "# ! dataconverter --reader apm --nxdl NXapm --input-file synthesize1 --output apm.case0.nxs" + "! dataconverter --reader apm --nxdl NXapm --input-file synthesize1 --output apm.case0.nxs" ] }, { @@ -496,7 +515,7 @@ "metadata": {}, "source": [ "### Contact person for the apm reader and related examples in FAIRmat:\n", - "Markus Kühbach, 2023/05
\n", + "Markus Kühbach, 2023/08/31
\n", "\n", "### Funding\n", "FAIRmat is a consortium on research data management which is part of the German NFDI.
\n", @@ -527,7 +546,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.16" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/examples/apm/apm.oasis.specific.yaml b/examples/apm/apm.oasis.specific.yaml new file mode 100644 index 000000000..82394f07e --- /dev/null +++ b/examples/apm/apm.oasis.specific.yaml @@ -0,0 +1 @@ +location: Leoben diff --git a/examples/apm/eln_data_apm.yaml b/examples/apm/eln_data_apm.yaml index 11e29ced4..ddd67ebcf 100644 --- a/examples/apm/eln_data_apm.yaml +++ b/examples/apm/eln_data_apm.yaml @@ -1,82 +1,118 @@ atom_probe: analysis_chamber_pressure: unit: torr - value: 1.0e-10 + value: 2.0e-10 control_software_program: IVAS - control_software_program__attr_version: 3.6.4 - fabrication_capabilities: n/a - fabrication_identifier: n/a + control_software_program__attr_version: 3.6.8 + fabrication_identifier: '12' fabrication_model: LEAP3000 - fabrication_vendor: AMETEK/Cameca + fabrication_vendor: Cameca + field_of_view: + unit: nm + value: 20. flight_path_length: unit: m - value: 0.9 - instrument_name: LEAP 3000 - ion_detector_model: cameca - ion_detector_name: none + value: 1.2 + instrument_name: LEAP + ion_detector_model: n/a + ion_detector_name: n/a ion_detector_serial_number: n/a ion_detector_type: mcp_dld - local_electrode_name: electrode 1 + local_electrode_name: L1 + location: Denton pulser: - laser_source_name: laser - laser_source_power: - unit: W - value: 2.0e-08 - laser_source_pulse_energy: - unit: J - value: 1.2e-11 - laser_source_wavelength: - unit: m - value: 4.8e-07 - pulse_fraction: 0.1 + laser_source: + - name: laser1 + power: + unit: nW + value: 24.0 + pulse_energy: + unit: pJ + value: 24.0 + wavelength: + unit: nm + value: 355.0 + - name: laser2 + power: + unit: nW + value: 12.0 + pulse_energy: + unit: pJ + value: 12.0 + wavelength: + unit: nm + value: 254.0 + pulse_fraction: 0.8 pulse_frequency: unit: kHz - value: 250 - pulse_mode: laser + value: 250.0 + pulse_mode: laser_and_voltage reflectron_applied: true - specimen_monitoring_detection_rate: 0.6 + specimen_monitoring_detection_rate: 0.8 specimen_monitoring_initial_radius: unit: nm - value: 30 + value: 12.0 specimen_monitoring_shank_angle: unit: ° - value: 5 + value: 5.0 stage_lab_base_temperature: unit: K - value: 30 + value: 20.0 status: success entry: - attr_version: nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696 - definition: NXapm - end_time: '2022-09-22T20:00:00+00:00' - experiment_description: some details for nomad, ODS steel precipitates for testing - a developmental clustering algorithm called OPTICS. - experiment_identifier: R31-06365-v02 + experiment_description: '

Normal

+ +

Bold

+ +

Italics

' + experiment_identifier: Si test + start_time: '2023-06-11T11:20:00+00:00' + end_time: '2023-06-11T11:20:00+00:00' + run_number: '2121' operation_mode: apt - program: IVAS - program__attr_version: 3.6.4 - run_number: '6365' - start_time: '2022-09-20T20:00:00+00:00' ranging: program: IVAS - program__attr_version: 3.6.4 + program__attr_version: 3.6.8 reconstruction: crystallographic_calibration: n/a - parameter: kf = 1.8, ICF = 1.02, Vat = 60 at/nm^3 + parameter: kf = 1.8, icf = 3.3 program: IVAS - program__attr_version: 3.6.4 - protocol_name: cameca + program__attr_version: 3.6.8 + protocol_name: bas +sample: + composition: + - Mo + - Al 12 +- 3 + - B 50 ppm +- 12 + - C 3.6 + grain_diameter: + unit: µm + value: 200.0 + grain_diameter_error: + unit: µm + value: 50.0 + heat_treatment_quenching_rate: + unit: K / s + value: 150.0 + heat_treatment_quenching_rate_error: + unit: K / s + value: 10.0 + heat_treatment_temperature: + unit: K + value: 600.0 + heat_treatment_temperature_error: + unit: K + value: 20.0 specimen: - atom_types: - - Fe - - Cr - - Y - - O - description: ODS steel, i.e. material with Y2O3 dispersoids - name: ODS-Specimen 1 - preparation_date: '2022-09-12T20:01:00+00:00' - sample_history: undocumented - short_title: ODS + alias: Si + description: '

normal

+ +

bold

+ +

italics

' + is_polycrystalline: true + name: usa_denton_smith_si + preparation_date: '2023-06-11T12:51:00+00:00' user: -- name: Jing Wang -- name: Daniel Schreiber +- {} +- {} diff --git a/examples/ellipsometry/eln_data.yaml b/examples/ellipsometry/eln_data.yaml index 70b708ef3..f20f75861 100644 --- a/examples/ellipsometry/eln_data.yaml +++ b/examples/ellipsometry/eln_data.yaml @@ -5,7 +5,7 @@ Data: data_software/version: '3.882' data_type: Psi/Delta spectrum_type: wavelength - spectrum_unit: Angstroms + spectrum_unit: angstrom Instrument: Beam_path: Detector: @@ -58,9 +58,6 @@ colnames: - Delta - err.Psi - err.Delta -definition: NXellipsometry -definition/@url: https://github.com/FAIRmat-NFDI/nexus_definitions/blob/fairmat/contributed_definitions/NXellipsometry.nxdl.xml -definition/@version: 0.0.2 derived_parameter_type: depolarization experiment_description: RC2 scan on 2nm SiO2 on Si in air experiment_identifier: exp-ID diff --git a/examples/em_nion/Write.NXem_nion.Example.1.ipynb b/examples/em_nion/Write.NXem_nion.Example.1.ipynb index af08fdd0e..0d48dea69 100644 --- a/examples/em_nion/Write.NXem_nion.Example.1.ipynb +++ b/examples/em_nion/Write.NXem_nion.Example.1.ipynb @@ -88,7 +88,15 @@ "metadata": {}, "outputs": [], "source": [ - "! wget https://www.zenodo.org/record/7986279/files/ger_berlin_haas_nionswift_multimodal.zip\n", + "! wget https://www.zenodo.org/record/7986279/files/ger_berlin_haas_nionswift_multimodal.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "zp.ZipFile(\"ger_berlin_haas_nionswift_multimodal.zip\").extractall(path=\"\", members=None, pwd=None)" ] }, @@ -240,7 +248,7 @@ "metadata": {}, "source": [ "### Contact person for the em_nion reader and related examples in FAIRmat:\n", - "Markus Kühbach, 2023/05
\n", + "Markus Kühbach, 2023/08/31
\n", "\n", "### Funding\n", "FAIRmat is a consortium on research data management which is part of the German NFDI.
\n", @@ -271,7 +279,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.6" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/examples/em_om/Write.NXem_ebsd.Example.1.ipynb b/examples/em_om/Write.NXem_ebsd.Example.1.ipynb index dd62925fb..7f5afeb6e 100644 --- a/examples/em_om/Write.NXem_ebsd.Example.1.ipynb +++ b/examples/em_om/Write.NXem_ebsd.Example.1.ipynb @@ -259,11 +259,13 @@ "cell_type": "code", "execution_count": null, "metadata": { + "scrolled": true, "tags": [] }, "outputs": [], "source": [ "#parser-nexus/tests/data/tools/dataconverter/readers/em_om/\n", + "import numpy as np\n", "eln_data_file_name = [\"eln_data_em_om.yaml\"]\n", "input_data_file_name = [\"PrcShanghaiShi.EBSPs70deg.zip\",\n", " \"H5OINA_examples_Specimen_1_Map_EDS_+_EBSD_Map_Data_2.h5oina\",\n", @@ -273,7 +275,7 @@ " \"em_om.case2.nxs\",\n", " \"em_om.case3e.nxs\",\n", " \"em_om.case4.nxs\"]\n", - "for case_id in [4]: # [0, 1, 2, 3]:\n", + "for case_id in np.arange(0, 3 + 1):\n", " ELN = eln_data_file_name[0]\n", " INPUT = input_data_file_name[case_id]\n", " OUTPUT = output_file_name[case_id]\n", @@ -305,10 +307,10 @@ "source": [ "# H5Web(OUTPUT)\n", "H5Web(\"em_om.case0.nxs\")\n", - "H5Web(\"em_om.case1.nxs\")\n", - "H5Web(\"em_om.case2.nxs\")\n", - "H5Web(\"em_om.case3e.nxs\")\n", - "H5Web(\"em_om.case4.nxs\")" + "# H5Web(\"em_om.case1.nxs\")\n", + "# H5Web(\"em_om.case2.nxs\")\n", + "# H5Web(\"em_om.case3e.nxs\")\n", + "# H5Web(\"em_om.case4.nxs\")" ] }, { @@ -338,7 +340,7 @@ "metadata": {}, "source": [ "### Contact person for the apm reader and related examples in FAIRmat:\n", - "Markus Kühbach, 2023/05
\n", + "Markus Kühbach, 2023/08/31
\n", "\n", "### Funding\n", "FAIRmat is a consortium on research data management which is part of the German NFDI.
\n", @@ -362,7 +364,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.16" + "version": "3.10.12" }, "vscode": { "interpreter": { diff --git a/examples/em_spctrscpy/Write.NXem.Example.1.ipynb b/examples/em_spctrscpy/Write.NXem.Example.1.ipynb index 61b0f33d3..3b57b7f9f 100644 --- a/examples/em_spctrscpy/Write.NXem.Example.1.ipynb +++ b/examples/em_spctrscpy/Write.NXem.Example.1.ipynb @@ -239,9 +239,9 @@ "outputs": [], "source": [ "# H5Web(OUTPUT)\n", - "# H5Web(\"em_sp.case1.nxs\")\n", + "H5Web(\"em_sp.case1.nxs\")\n", "# H5Web(\"em_sp.case2.nxs\")\n", - "H5Web(\"em_sp.case3.nxs\")" + "# H5Web(\"em_sp.case3.nxs\")" ] }, { @@ -305,7 +305,7 @@ "metadata": {}, "source": [ "### Contact person for the apm reader and related examples in FAIRmat:\n", - "Markus Kühbach, 2023/05
\n", + "Markus Kühbach, 2023/08/31
\n", "\n", "### Funding\n", "FAIRmat is a consortium on research data management which is part of the German NFDI.
\n", @@ -336,7 +336,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.16" + "version": "3.10.12" }, "vscode": { "interpreter": { diff --git a/examples/json_map/README.md b/examples/json_map/README.md new file mode 100644 index 000000000..788cb6890 --- /dev/null +++ b/examples/json_map/README.md @@ -0,0 +1,36 @@ +# JSON Map Reader + +## What is this reader? + +This reader is designed to allow users of pynxtools to convert their existing data with the help of a map file. The map file tells the reader what to pick from your data files and convert them to FAIR NeXus files. The following formats are supported as input files: +* HDF5 (any extension works i.e. h5, hdf5, nxs, etc) +* JSON +* Python Dict Objects Pickled with [pickle](https://docs.python.org/3/library/pickle.html). These can contain [xarray.DataArray](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.html) objects as well as regular Python types and Numpy types. + +It accepts any NXDL file that you like as long as your mapping file contains all the fields. +Please use the --generate-template function of the dataconverter to create a .mapping.json file. + +```console +user@box:~$ dataconverter --nxdl NXmynxdl --generate-template > mynxdl.mapping.json +``` +##### Details on the [mapping.json](/pynxtools/dataconverter/readers/json_map/README.md#the-mappingjson-file) file. + +## How to run these examples? + +### Automatically merge partial NeXus files +```console +user@box:~$ dataconverter --nxdl NXiv_temp --input-file voltage_and_temperature.nxs --input-file current.nxs --output auto_merged.nxs +``` + +### Map and copy over data to new NeXus file +```console +user@box:~$ dataconverter --nxdl NXiv_temp --mapping merge_copied.mapping.json --input-file voltage_and_temperature.nxs --input-file current.nxs --output merged_copied.nxs +``` + +### Map and link over data to new NeXus file +```console +user@box:~$ dataconverter --nxdl NXiv_temp --mapping merge_linked.mapping.json --input-file voltage_and_temperature.nxs --input-file current.nxs --output merged_linked.nxs +``` + +## Contact person in FAIRmat for this reader +Sherjeel Shabih diff --git a/examples/json_map/merge_copied.mapping.json b/examples/json_map/merge_copied.mapping.json new file mode 100644 index 000000000..bba897874 --- /dev/null +++ b/examples/json_map/merge_copied.mapping.json @@ -0,0 +1,35 @@ +{ + "/@default": "entry", + "/ENTRY[entry]/DATA[data]/current": "/entry/data/current", + "/ENTRY[entry]/DATA[data]/current_295C": "/entry/data/current_295C", + "/ENTRY[entry]/DATA[data]/current_300C": "/entry/data/current_300C", + "/ENTRY[entry]/DATA[data]/current_305C": "/entry/data/current_305C", + "/ENTRY[entry]/DATA[data]/current_310C": "/entry/data/current_310C", + "/ENTRY[entry]/DATA[data]/temperature": "/entry/data/temperature", + "/ENTRY[entry]/DATA[data]/voltage": "/entry/data/voltage", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/voltage_controller/calibration_time": "/entry/instrument/environment/voltage_controller/calibration_time", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/voltage_controller/run_control": "/entry/instrument/environment/voltage_controller/run_control", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/voltage_controller/value": "/entry/instrument/environment/voltage_controller/value", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/temperature_controller/calibration_time": 
"/entry/instrument/environment/temperature_controller/calibration_time", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/temperature_controller/run_control": "/entry/instrument/environment/temperature_controller/run_control", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/temperature_controller/value": "/entry/instrument/environment/temperature_controller/value", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/current_sensor/calibration_time": "/entry/instrument/environment/current_sensor/calibration_time", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/current_sensor/run_control": "/entry/instrument/environment/current_sensor/run_control", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/current_sensor/value": "/entry/instrument/environment/current_sensor/value", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/independent_controllers": ["voltage_controller", "temperature_control"], + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/measurement_sensors": ["current_sensor"], + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]/description": "/entry/instrument/environment/heating_pid/description", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]/setpoint": "/entry/instrument/environment/heating_pid/setpoint", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]/K_p_value": "/entry/instrument/environment/heating_pid/K_p_value", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]/K_i_value": "/entry/instrument/environment/heating_pid/K_i_value", + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]/K_d_value": "/entry/instrument/environment/heating_pid/K_d_value", + "/ENTRY[entry]/PROCESS[process]/program": "Bluesky", + "/ENTRY[entry]/PROCESS[process]/program/@version": "1.6.7", + "/ENTRY[entry]/SAMPLE[sample]/name": "super", + "/ENTRY[entry]/SAMPLE[sample]/atom_types": "Si, C", + "/ENTRY[entry]/definition": "NXiv_temp", + "/ENTRY[entry]/definition/@version": "1", + "/ENTRY[entry]/experiment_identifier": "dbdfed37-35ed-4aee-a465-aaa0577205b1", + "/ENTRY[entry]/experiment_description": "A simple IV temperature experiment.", + "/ENTRY[entry]/start_time": "2022-05-30T16:37:03.909201+02:00" +} \ No newline at end of file diff --git a/examples/json_map/merge_linked.mapping.json b/examples/json_map/merge_linked.mapping.json new file mode 100644 index 000000000..47ede8b92 --- /dev/null +++ b/examples/json_map/merge_linked.mapping.json @@ -0,0 +1,25 @@ +{ + "/@default": "entry", + "/ENTRY[entry]/DATA[data]/current": {"link": "current.nxs:/entry/data/current"}, + "/ENTRY[entry]/DATA[data]/current_295C": {"link": "current.nxs:/entry/data/current_295C"}, + "/ENTRY[entry]/DATA[data]/current_300C": {"link": "current.nxs:/entry/data/current_300C"}, + "/ENTRY[entry]/DATA[data]/current_305C": {"link": "current.nxs:/entry/data/current_305C"}, + "/ENTRY[entry]/DATA[data]/current_310C": {"link": "current.nxs:/entry/data/current_310C"}, + "/ENTRY[entry]/DATA[data]/temperature": {"link": "voltage_and_temperature.nxs:/entry/data/temperature"}, + "/ENTRY[entry]/DATA[data]/voltage": {"link": "voltage_and_temperature.nxs:/entry/data/voltage"}, + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/voltage_controller": {"link": "voltage_and_temperature.nxs:/entry/instrument/environment/voltage_controller"}, + 
"/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/temperature_controller": {"link": "voltage_and_temperature.nxs:/entry/instrument/environment/temperature_controller"}, + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/current_sensor": {"link": "current.nxs:/entry/instrument/environment/current_sensor"}, + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/independent_controllers": ["voltage_controller", "temperature_control"], + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/measurement_sensors": ["current_sensor"], + "/ENTRY[entry]/INSTRUMENT[instrument]/ENVIRONMENT[environment]/NXpid[heating_pid]": {"link": "voltage_and_temperature.nxs:/entry/instrument/environment/heating_pid"}, + "/ENTRY[entry]/PROCESS[process]/program": "Bluesky", + "/ENTRY[entry]/PROCESS[process]/program/@version": "1.6.7", + "/ENTRY[entry]/SAMPLE[sample]/name": "super", + "/ENTRY[entry]/SAMPLE[sample]/atom_types": "Si, C", + "/ENTRY[entry]/definition": "NXiv_temp", + "/ENTRY[entry]/definition/@version": "1", + "/ENTRY[entry]/experiment_identifier": "dbdfed37-35ed-4aee-a465-aaa0577205b1", + "/ENTRY[entry]/experiment_description": "A simple IV temperature experiment.", + "/ENTRY[entry]/start_time": "2022-05-30T16:37:03.909201+02:00" +} \ No newline at end of file diff --git a/examples/sts/README.md b/examples/sts/README.md new file mode 100644 index 000000000..eb2c53482 --- /dev/null +++ b/examples/sts/README.md @@ -0,0 +1,32 @@ +# STS Reader +***Note: Though the reader name is STS reader it also supports STM type experiment. This is the first version of the reader according to the NeXus application definition [NXsts](https://github.com/FAIRmat-NFDI/nexus_definitions/blob/fairmat/contributed_definitions/NXsts.nxdl.xml) which is a generic template of concepts' definition for STS and STM experiments. Later on, both application definitions and readers specific to the STM, STS and AFM will be available. To stay upto date keep visiting this page time to time. From now onwards we will mention STS referring both STM and STS.*** + +Main goal of STS Reader is to transform different file formats from diverse STS lab into STS community standard [STS application definition](https://github.com/FAIRmat-NFDI/nexus_definitions/blob/fairmat/contributed_definitions/NXsts.nxdl.xml), community defined template that define indivisual concept associated with STS experiment constructed by SPM community. +## STS Example +It has diverse examples from several versions (Generic 5e and Generic 4.5) of Nanonis software for STS experiments at [https://gitlab.mpcdf.mpg.de](https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/-/tree/develop/docker/sts). But, to utilize that examples one must have an account at https://gitlab.mpcdf.mpg.de. If still you want to try the examples from the sts reader out, please reach out to [Rubel Mozumder](mozumder@physik.hu-berlin.de) or the docker container (discussed below). + +To get a detailed overview of the sts reader implementation visit [pynxtools](https://github.com/FAIRmat-NFDI/pynxtools/tree/master/pynxtools/dataconverter/readers/sts). + +## STS deocker image +STS docker image contains all prerequisite tools (e.g. jupyter-notebook) and library to run STS reader. To use the image user needs to [install docker engine](https://docs.docker.com/engine/install/). 
+ +STS Image: `gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/sts-jupyter:latest` + +To run the STS image as a docker container copy the code below in a file `docker-compose.yaml` + +```docker +# docker-compose.yaml + +version: "3.9" + +services: + sts: + image: gitlab-registry.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/sts-jupyter:latest + ports: + - 8888:8888 + volumes: + - ./example:/home/jovyan/work_dir + working_dir: /home/jovyan/work_dir +``` + +and launch the file from the same directory with `docker compose up` command. diff --git a/pynxtools/__init__.py b/pynxtools/__init__.py index 2290aef3b..12b6f64ba 100644 --- a/pynxtools/__init__.py +++ b/pynxtools/__init__.py @@ -18,3 +18,71 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import os +import re +from datetime import datetime +from glob import glob +from typing import Union + +from pynxtools._build_wrapper import get_vcs_version +from pynxtools.definitions.dev_tools.globals.nxdl import get_nxdl_version + +MAIN_BRANCH_NAME = "fairmat" + + +def _build_version(tag: str, distance: int, node: str, dirty: bool) -> str: + """ + Builds the version string for a given set of git states. + This resembles `no-guess-dev` + `node-and-date` behavior from setuptools_scm. + """ + if distance == 0 and not dirty: + return f"{tag}" + + dirty_appendix = datetime.now().strftime(".d%Y%m%d") if dirty else "" + return f"{tag}.post1.dev{distance}+{node}{dirty_appendix}" + + +def format_version(version: str) -> str: + """ + Formats the git describe version string into the local format. + """ + version_parts = version.split("-") + + return _build_version( + version_parts[0], + int(version_parts[1]), + version_parts[2], + len(version_parts) == 4 and version_parts[3] == "dirty", + ) + + +def get_nexus_version() -> str: + """ + The version of the Nexus standard and the NeXus Definition language + based on git tags and commits + """ + version = get_vcs_version() + + if version is not None: + return format_version(version) + + version_file = os.path.join(os.path.dirname(__file__), "nexus-version.txt") + + if not os.path.exists(version_file): + # We are in the limbo, just get the nxdl version from nexus definitions + return format_version(get_nxdl_version()) + + with open(version_file, encoding="utf-8") as vfile: + return format_version(vfile.read().strip()) + + +def get_nexus_version_hash() -> str: + """ + Gets the git hash from the nexus version string + """ + version = re.search(r"g([a-z0-9]+)", get_nexus_version()) + + if version is None: + return MAIN_BRANCH_NAME + + return version.group(1) diff --git a/pynxtools/_build_wrapper.py b/pynxtools/_build_wrapper.py new file mode 100644 index 000000000..d7788860d --- /dev/null +++ b/pynxtools/_build_wrapper.py @@ -0,0 +1,71 @@ +""" +Build wrapper for setuptools to create a nexus-version.txt file +containing the nexus definitions verison. 
+""" +import os +from subprocess import CalledProcessError, run +from typing import Optional + +from setuptools import build_meta as _orig +from setuptools.build_meta import * # pylint: disable=wildcard-import,unused-wildcard-import + + +def get_vcs_version(tag_match="*[0-9]*") -> Optional[str]: + """ + The version of the Nexus standard and the NeXus Definition language + based on git tags and commits + """ + try: + return ( + run( + [ + "git", + "describe", + "--dirty", + "--tags", + "--long", + "--match", + tag_match, + ], + cwd=os.path.join(os.path.dirname(__file__), "../pynxtools/definitions"), + check=True, + capture_output=True, + ) + .stdout.decode("utf-8") + .strip() + ) + except (FileNotFoundError, CalledProcessError): + return None + + +def _write_version_to_metadata(): + version = get_vcs_version() + if version is None or not version: + return + + with open( + os.path.join(os.path.dirname(__file__), "nexus-version.txt"), + "w+", + encoding="utf-8", + ) as file: + file.write(version) + + +# pylint: disable=function-redefined +def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): + """ + PEP 517 compliant build wheel hook. + This is a wrapper for setuptools and adds a nexus version file. + """ + _write_version_to_metadata() + return _orig.build_wheel(wheel_directory, config_settings, metadata_directory) + + +# pylint: disable=function-redefined +def build_sdist(sdist_directory, config_settings=None): + """ + PEP 517 compliant build sdist hook. + This is a wrapper for setuptools and adds a nexus version file. + """ + _write_version_to_metadata() + return _orig.build_sdist(sdist_directory, config_settings) diff --git a/pynxtools/dataconverter/README.md b/pynxtools/dataconverter/README.md index 617c2de1f..f8d600f41 100644 --- a/pynxtools/dataconverter/README.md +++ b/pynxtools/dataconverter/README.md @@ -23,7 +23,7 @@ Usage: dataconverter [OPTIONS] Options: --input-file TEXT The path to the input data file to read. (Repeat for more than one file.) - --reader [apm|ellips|em_nion|em_spctrscpy|example|hall|json_map|json_yml|mpes|rii_database|transmission|xps] + --reader [apm|ellips|em_nion|em_om|em_spctrscpy|example|hall|json_map|json_yml|mpes|rii_database|sts|transmission|xps] The reader to use. default="example" --nxdl TEXT The name of the NXDL file to use without extension. @@ -35,9 +35,28 @@ Options: checking the documentation. --params-file FILENAME Allows to pass a .yaml file with all the parameters the converter supports. + --undocumented Shows a log output for all undocumented + fields + --mapping TEXT Takes a .mapping.json file and + converts data from given input files. --help Show this message and exit. ``` +#### Merge partial NeXus files into one + +```console +user@box:~$ dataconverter --nxdl nxdl --input-file partial1.nxs --input-file partial2.nxs +``` + +#### Map an HDF5/JSON/(Python Dict pickled in a pickle file) + +```console +user@box:~$ dataconverter --nxdl nxdl --input-file any_data.hdf5 --mapping my_custom_map.mapping.json +``` + +#### You can find actual examples with data files at [`examples/json_map`](../../examples/json_map/). 
+ + #### Use with multiple input files ```console diff --git a/pynxtools/dataconverter/convert.py b/pynxtools/dataconverter/convert.py index f63e782e2..46c9af7eb 100644 --- a/pynxtools/dataconverter/convert.py +++ b/pynxtools/dataconverter/convert.py @@ -22,22 +22,26 @@ import logging import os import sys -from shutil import copyfile -from typing import List, Tuple +from typing import List, Tuple, Optional import xml.etree.ElementTree as ET import click import yaml - from pynxtools.dataconverter.readers.base.reader import BaseReader from pynxtools.dataconverter import helpers from pynxtools.dataconverter.writer import Writer from pynxtools.dataconverter.template import Template from pynxtools.nexus import nexus +if sys.version_info >= (3, 10): + from importlib.metadata import entry_points +else: + from importlib_metadata import entry_points + logger = logging.getLogger(__name__) # pylint: disable=C0103 +UNDOCUMENTED = 9 logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler(sys.stdout)) @@ -47,8 +51,18 @@ def get_reader(reader_name) -> BaseReader: path_prefix = f"{os.path.dirname(__file__)}{os.sep}" if os.path.dirname(__file__) else "" path = os.path.join(path_prefix, "readers", reader_name, "reader.py") spec = importlib.util.spec_from_file_location("reader.py", path) - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) # type: ignore[attr-defined] + try: + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) # type: ignore[attr-defined] + except FileNotFoundError as exc: + # pylint: disable=unexpected-keyword-arg + importlib_module = entry_points(group='pynxtools.reader') + if ( + importlib_module + and reader_name in map(lambda ep: ep.name, entry_points(group='pynxtools.reader')) + ): + return importlib_module[reader_name].load() + raise ValueError(f"The reader, {reader_name}, was not found.") from exc return module.READER # type: ignore[attr-defined] @@ -62,96 +76,150 @@ def get_names_of_all_readers() -> List[str]: index_of_readers_folder_name = file.rindex(f"readers{os.sep}") + len(f"readers{os.sep}") index_of_last_path_sep = file.rindex(os.sep) all_readers.append(file[index_of_readers_folder_name:index_of_last_path_sep]) - return all_readers - - -def append_template_data_to_acopy_of_one_inputfile(input: Tuple[str], output: str): - """Helper function to build outputfile based on one inputfile plus template data.""" - # There are cases in which one of the inputfiles may contain already NeXus content - # typically because the scientific software tool generates such a file - # matching a specific application definition and thus additional pieces of information - # inside the template (e.g. 
from an ELN) should just be added to that inputfile - - # one may or not in this case demand for a verification of that input file - # before continuing, currently we ignore this verification - for file_name in input: - if file_name[0:file_name.rfind('.')] != output: - continue - else: - print(f"Creating the output {output} based the this input {file_name}\n" \ - f"NeXus content in {file_name} is currently not verified !!!") - copyfile(file_name, output) - - print(f"Template data will be added to the output {output}...\n" \ - f"Only these template data will be verified !!!") - # when calling dataconverter with - # --input-file processed.nxs.mtex - # --output processed.nxs - # -- io_mode="r+" - # these calls can be executed repetitively as the first step is - # the copying operation of *.nxs.mtex to *.nxs and then the access on the *.nxs - # file using h5py is then read/write without regeneration - # a repeated call has factually the same effect as the dataconverter - # used to work i.e. using h5py with "w" would regenerate the *.nxs if already existent - # this is a required to assure that repetitive calls of the ELN save function - # in NOMAD do not end up with write conflicts on the *.nxs i.e. the output file - # when the dataconverter is called - return - - -# pylint: disable=too-many-arguments -def convert(input_file: Tuple[str], - reader: str, - nxdl: str, - output: str, - io_mode: str = "w", - generate_template: bool = False, - fair: bool = False, - **kwargs): - """The conversion routine that takes the input parameters and calls the necessary functions.""" + plugins = list(map(lambda ep: ep.name, entry_points(group='pynxtools.reader'))) + return all_readers + plugins + + +def get_nxdl_root_and_path(nxdl: str): + """Get xml root element and file path from nxdl name e.g. NXapm. + + Parameters + ---------- + nxdl: str + Name of nxdl file e.g. NXapm from NXapm.nxdl.xml. + + Returns + ------- + ET.root + Root element of nxdl file. + str + Path of nxdl file. + + Raises + ------ + FileNotFoundError + Error if no file with the given nxdl name is found. 
+ """ # Reading in the NXDL and generating a template definitions_path = nexus.get_nexus_definitions_path() if nxdl == "NXtest": - nxdl_path = os.path.join( + nxdl_f_path = os.path.join( f"{os.path.abspath(os.path.dirname(__file__))}/../../", "tests", "data", "dataconverter", "NXtest.nxdl.xml") elif nxdl == "NXroot": - nxdl_path = os.path.join(definitions_path, "base_classes", "NXroot.nxdl.xml") + nxdl_f_path = os.path.join(definitions_path, "base_classes", "NXroot.nxdl.xml") else: - nxdl_path = os.path.join(definitions_path, "contributed_definitions", f"{nxdl}.nxdl.xml") - if not os.path.exists(nxdl_path): - nxdl_path = os.path.join(definitions_path, "applications", f"{nxdl}.nxdl.xml") - if not os.path.exists(nxdl_path): + nxdl_f_path = os.path.join(definitions_path, "contributed_definitions", f"{nxdl}.nxdl.xml") + if not os.path.exists(nxdl_f_path): + nxdl_f_path = os.path.join(definitions_path, "applications", f"{nxdl}.nxdl.xml") + if not os.path.exists(nxdl_f_path): + nxdl_f_path = os.path.join(definitions_path, "base_classes", f"{nxdl}.nxdl.xml") + if not os.path.exists(nxdl_f_path): raise FileNotFoundError(f"The nxdl file, {nxdl}, was not found.") - nxdl_root = ET.parse(nxdl_path).getroot() + return ET.parse(nxdl_f_path).getroot(), nxdl_f_path + + +def transfer_data_into_template(input_file, + reader, nxdl_name, + nxdl_root: Optional[ET.Element] = None, + **kwargs): + """Transfer parse and merged data from input experimental file, config file and eln. + + Experimental and eln files will be parsed and finally will be merged into template. + Before returning the template validate the template data. + + Parameters + ---------- + input_file : Union[tuple[str], str] + Tuple of files or file + reader: str + Name of reader such as xps + nxdl_name : str + Root name of nxdl file, e.g. NXmpes from NXmpes.nxdl.xml + nxdl_root : ET.element + Root element of nxdl file, otherwise provide nxdl_name + + Returns + ------- + Template + Template filled with data from raw file and eln file. + + """ + if nxdl_root is None: + nxdl_root, _ = get_nxdl_root_and_path(nxdl=nxdl_name) template = Template() helpers.generate_template_from_nxdl(nxdl_root, template) - if generate_template: - logger.info(template) - return - # Setting up all the input data if isinstance(input_file, str): input_file = (input_file,) + bulletpoint = "\n\u2022 " logger.info("Using %s reader to convert the given files: %s ", reader, bulletpoint.join((" ", *input_file))) data_reader = get_reader(reader) - if not (nxdl in data_reader.supported_nxdls or "*" in data_reader.supported_nxdls): + if not (nxdl_name in data_reader.supported_nxdls or "*" in data_reader.supported_nxdls): raise NotImplementedError("The chosen NXDL isn't supported by the selected reader.") data = data_reader().read( # type: ignore[operator] template=Template(template), file_paths=input_file, - **kwargs, + **kwargs ) - helpers.validate_data_dict(template, data, nxdl_root) + return data + + +# pylint: disable=too-many-arguments,too-many-locals +def convert(input_file: Tuple[str, ...], + reader: str, + nxdl: str, + output: str, + generate_template: bool = False, + fair: bool = False, + undocumented: bool = False, + **kwargs): + """The conversion routine that takes the input parameters and calls the necessary functions. + + Parameters + ---------- + input_file : Tuple[str] + Tuple of files or file + reader: str + Name of reader such as xps + nxdl : str + Root name of nxdl file, e.g. NXmpes for NXmpes.nxdl.xml + output : str + Output file name. 
+ generate_template : bool, default False + True if user wants template in logger info. + fair : bool, default False + If True, a warning is given that there are undocumented paths + in the template. + undocumented : bool, default False + If True, an undocumented warning is given. + + Returns + ------- + None. + """ + + nxdl_root, nxdl_f_path = get_nxdl_root_and_path(nxdl) + + if generate_template: + template = Template() + helpers.generate_template_from_nxdl(nxdl_root, template) + logger.info(template) + return + data = transfer_data_into_template(input_file=input_file, reader=reader, + nxdl_name=nxdl, nxdl_root=nxdl_root, + **kwargs) + if undocumented: + logger.setLevel(UNDOCUMENTED) if fair and data.undocumented.keys(): logger.warning("There are undocumented paths in the template. This is not acceptable!") return @@ -159,13 +227,13 @@ def convert(input_file: Tuple[str], for path in data.undocumented.keys(): if "/@default" in path: continue - logger.warning("The path, %s, is being written but has no documentation.", path) - - if io_mode == "r+": - append_template_data_to_acopy_of_one_inputfile( - input=input_file, output=output) - - Writer(data=data, nxdl_path=nxdl_path, output_path=output, io_mode=io_mode).write() + logger.log( + UNDOCUMENTED, + "The path, %s, is being written but has no documentation.", + path + ) + helpers.add_default_root_attributes(data=data, filename=os.path.basename(output)) + Writer(data=data, nxdl_f_path=nxdl_f_path, output_path=output).write() logger.info("The output file generated: %s", output) @@ -187,7 +255,7 @@ def parse_params_file(params_file): ) @click.option( '--reader', - default='example', + default='json_map', type=click.Choice(get_names_of_all_readers(), case_sensitive=False), help='The reader to use. default="example"' ) @@ -202,11 +270,6 @@ def parse_params_file(params_file): default='output.nxs', help='The path to the output NeXus file to be generated.' ) -@click.option( - '--io_mode', - default='w', - help='I/O mode on the output NeXus file, see h5py doc for mode details, default="w".' -) @click.option( '--generate-template', is_flag=True, @@ -218,21 +281,33 @@ def parse_params_file(params_file): is_flag=True, default=False, help='Let the converter know to be stricter in checking the documentation.' -) # pylint: disable=too-many-arguments +) @click.option( '--params-file', type=click.File('r'), default=None, help='Allows to pass a .yaml file with all the parameters the converter supports.' ) -def convert_cli(input_file: Tuple[str], +@click.option( + '--undocumented', + is_flag=True, + default=False, + help='Shows a log output for all undocumented fields' +) +@click.option( + '--mapping', + help='Takes a .mapping.json file and converts data from given input files.' 
+) +# pylint: disable=too-many-arguments +def convert_cli(input_file: Tuple[str, ...], reader: str, nxdl: str, output: str, - io_mode: str, generate_template: bool, fair: bool, - params_file: str): + params_file: str, + undocumented: bool, + mapping: str): """The CLI entrypoint for the convert function""" if params_file: try: @@ -248,7 +323,11 @@ def convert_cli(input_file: Tuple[str], sys.tracebacklimit = 0 raise IOError("\nError: Please supply an NXDL file with the option:" " --nxdl ") - convert(input_file, reader, nxdl, output, io_mode, generate_template, fair) + if mapping: + reader = "json_map" + if mapping: + input_file = input_file + tuple([mapping]) + convert(input_file, reader, nxdl, output, generate_template, fair, undocumented) if __name__ == '__main__': diff --git a/pynxtools/dataconverter/hdfdict.py b/pynxtools/dataconverter/hdfdict.py index 4edb68259..a4bbf87e6 100644 --- a/pynxtools/dataconverter/hdfdict.py +++ b/pynxtools/dataconverter/hdfdict.py @@ -123,7 +123,16 @@ def _recurse(hdfobject, datadict): elif isinstance(value, h5py.Dataset): if not lazy: value = unpacker(value) - datadict[key] = value + datadict[key] = ( + value.asstr()[...] + if h5py.check_string_dtype(value.dtype) + else value + ) + + if "attrs" in dir(value): + datadict[key + "@"] = {} + for attr, attrval in value.attrs.items(): + datadict[key + "@"][attr] = attrval return datadict diff --git a/pynxtools/dataconverter/helpers.py b/pynxtools/dataconverter/helpers.py index 75a2bc2b9..57d526f4b 100644 --- a/pynxtools/dataconverter/helpers.py +++ b/pynxtools/dataconverter/helpers.py @@ -17,17 +17,24 @@ # """Helper functions commonly used by the convert routine.""" -from typing import List +from typing import List, Optional, Any from typing import Tuple, Callable, Union import re import xml.etree.ElementTree as ET +from datetime import datetime, timezone +import logging +import json import numpy as np from ase.data import chemical_symbols +import h5py +from pynxtools import get_nexus_version, get_nexus_version_hash from pynxtools.nexus import nexus from pynxtools.nexus.nexus import NxdlAttributeError +logger = logging.getLogger(__name__) + def is_a_lone_group(xml_element) -> bool: """Checks whether a given group XML element has no field or attributes mentioned""" @@ -155,6 +162,20 @@ def generate_template_from_nxdl(root, template, path="", nxdl_root=None, nxdl_na path_nxdl = convert_data_converter_dict_to_nxdl_path(path) list_of_children_to_add = get_all_defined_required_children(path_nxdl, nxdl_name) add_inherited_children(list_of_children_to_add, path, nxdl_root, template) + # Handling link: link has a target attibute that store absolute path of concept to be + # linked. Writer reads link from template in the format {'link': } + # {'link': ':/'} + elif tag == "link": + # NOTE: The code below can be implemented later once, NeXus brings optionality in + # link. Otherwise link will be considered optional by default. 
+ + # optionality = get_required_string(root) + # optional_parent = check_for_optional_parent(path, nxdl_root) + # optionality = "required" if optional_parent == "<>" else "optional" + # if optionality == "optional": + # template.optional_parents.append(optional_parent) + optionality = "optional" + template[optionality][path] = {'link': root.attrib['target']} for child in root: generate_template_from_nxdl(child, template, path, nxdl_root, nxdl_name) @@ -333,7 +354,7 @@ def path_in_data_dict(nxdl_path: str, data: dict) -> Tuple[bool, str]: for key in data.keys(): if nxdl_path == convert_data_converter_dict_to_nxdl_path(key): return True, key - return False, "" + return False, None def check_for_optional_parent(path: str, nxdl_root: ET.Element) -> str: @@ -366,6 +387,8 @@ def all_required_children_are_set(optional_parent_path, data, nxdl_root): """Walks over optional parent's children and makes sure all required ones are set""" optional_parent_path = convert_data_converter_dict_to_nxdl_path(optional_parent_path) for key in data: + if key in data["lone_groups"]: + continue nxdl_key = convert_data_converter_dict_to_nxdl_path(key) if nxdl_key[0:nxdl_key.rfind("/")] == optional_parent_path \ and is_node_required(nxdl_key, nxdl_root) \ @@ -424,7 +447,7 @@ def does_group_exist(path_to_group, data): return False -def ensure_all_required_fields_exist(template, data): +def ensure_all_required_fields_exist(template, data, nxdl_root): """Checks whether all the required fields are in the returned data object.""" for path in template["required"]: entry_name = get_name_from_data_dict_entry(path[path.rindex('/') + 1:]) @@ -432,9 +455,18 @@ def ensure_all_required_fields_exist(template, data): continue nxdl_path = convert_data_converter_dict_to_nxdl_path(path) is_path_in_data_dict, renamed_path = path_in_data_dict(nxdl_path, data) - if path in template["lone_groups"] and does_group_exist(path, data): - continue + renamed_path = path if renamed_path is None else renamed_path + if path in template["lone_groups"]: + opt_parent = check_for_optional_parent(path, nxdl_root) + if opt_parent != "<>": + if does_group_exist(opt_parent, data) and not does_group_exist(renamed_path, data): + raise ValueError(f"The required group, {path}, hasn't been supplied" + f" while its optional parent, {path}, is supplied.") + continue + if not does_group_exist(renamed_path, data): + raise ValueError(f"The required group, {path}, hasn't been supplied.") + continue if not is_path_in_data_dict or data[renamed_path] is None: raise ValueError(f"The data entry corresponding to {path} is required " f"and hasn't been supplied by the reader.") @@ -475,11 +507,10 @@ def validate_data_dict(template, data, nxdl_root: ET.Element): nxdl_path_to_elm: dict = {} # Make sure all required fields exist. 
- ensure_all_required_fields_exist(template, data) + ensure_all_required_fields_exist(template, data, nxdl_root) try_undocumented(data, nxdl_root) for path in data.get_documented().keys(): - # print(f"{path}") if data[path] is not None: entry_name = get_name_from_data_dict_entry(path[path.rindex('/') + 1:]) nxdl_path = convert_data_converter_dict_to_nxdl_path(path) @@ -559,12 +590,38 @@ def convert_to_hill(atoms_typ): return atom_list + list(atoms_typ) +def add_default_root_attributes(data, filename): + """ + Takes a dict/Template and adds NXroot fields/attributes that are inherently available + """ + def update_and_warn(key: str, value: str): + if key in data and data[key] != value: + logger.warning( + "The NXroot entry '%s' (value: %s) should not be populated by the reader. " + "This is overwritten by the actually used value '%s'", + key, data[key], value + ) + data[key] = value + + update_and_warn("/@NX_class", "NXroot") + update_and_warn("/@file_name", filename) + update_and_warn("/@file_time", str(datetime.now(timezone.utc).astimezone())) + update_and_warn("/@file_update_time", data["/@file_time"]) + update_and_warn( + "/@NeXus_repository", + "https://github.com/FAIRmat-NFDI/nexus_definitions/" + f"blob/{get_nexus_version_hash()}" + ) + update_and_warn("/@NeXus_version", get_nexus_version()) + update_and_warn("/@HDF5_version", '.'.join(map(str, h5py.h5.get_libversion()))) + update_and_warn("/@h5py_version", h5py.__version__) + + def extract_atom_types(formula, mode='hill'): """Extract atom types form chemical formula.""" - atom_types: set = set() element: str = "" - # tested with "(C38H54S4)n(NaO2)5(CH4)NH3B" + for char in formula: if char.isalpha(): if char.isupper() and element == "": @@ -594,3 +651,77 @@ def extract_atom_types(formula, mode='hill'): return convert_to_hill(atom_types) return atom_types + + +# pylint: disable=too-many-branches +def transform_to_intended_dt(str_value: Any) -> Optional[Any]: + """Transform string to the intended data type, if not then return str_value. + + E.g '2.5E-2' will be transfor into 2.5E-2 + tested with: '2.4E-23', '28', '45.98', 'test', ['59', '3.00005', '498E-34'], + '23 34 444 5000', None + with result: 2.4e-23, 28, 45.98, test, [5.90000e+01 3.00005e+00 4.98000e-32], + np.array([23 34 444 5000]), None + NOTE: add another arg in this func for giving 'hint' what kind of data like + numpy array or list + Parameters + ---------- + str_value : str + Data from other format that comes as string e.g. string of list. 
+ + Returns + ------- + Union[str, int, float, np.ndarray] + Converted data type + """ + + symbol_list_for_data_seperation = [';', ' '] + transformed: Any = None + + if isinstance(str_value, list): + try: + transformed = np.array(str_value, dtype=np.float64) + return transformed + except ValueError: + pass + + elif isinstance(str_value, np.ndarray): + return str_value + elif isinstance(str_value, str): + try: + transformed = int(str_value) + except ValueError: + try: + transformed = float(str_value) + except ValueError: + if '[' in str_value and ']' in str_value: + transformed = json.loads(str_value) + if transformed is not None: + return transformed + for sym in symbol_list_for_data_seperation: + if sym in str_value: + parts = str_value.split(sym) + modified_parts: List = [] + for part in parts: + part = transform_to_intended_dt(part) + if isinstance(part, (int, float)): + modified_parts.append(part) + else: + return str_value + return transform_to_intended_dt(modified_parts) + + return str_value + + +def nested_dict_to_slash_separated_path(nested_dict: dict, + flattened_dict: dict, + parent_path=''): + """Convert nested dict into slash separeted path upto certain level.""" + sep = '/' + + for key, val in nested_dict.items(): + path = parent_path + sep + key + if isinstance(val, dict): + nested_dict_to_slash_separated_path(val, flattened_dict, path) + else: + flattened_dict[path] = val diff --git a/pynxtools/dataconverter/readers/apm/map_concepts/apm_deployment_specifics_to_nx_map.py b/pynxtools/dataconverter/readers/apm/map_concepts/apm_deployment_specifics_to_nx_map.py new file mode 100644 index 000000000..d4cdf84f6 --- /dev/null +++ b/pynxtools/dataconverter/readers/apm/map_concepts/apm_deployment_specifics_to_nx_map.py @@ -0,0 +1,52 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Dict mapping values for a specific deployed config of NOMAD OASIS + ELN + apm reader.""" + +# pylint: disable=no-member,line-too-long + +# currently by virtue of design NOMAD OASIS specific examples show how different tools and +# services can be specifically coupled and implemented so that they work together +# currently we assume that the ELN provides all those pieces of information to instantiate +# a NeXus data artifact which technology-partner-specific files or database blobs can not +# deliver. 
Effectively a reader uses the eln_data.yaml generic ELN output to fill in these +# missing pieces of information while typically heavy data (tensors etc.) are translated +# and written from the technology-partner files +# for large application definitions this can lead to a practical inconvenience: +# the ELN that has to be exposed to the user is complex and has many fields to fill in +# just to ensure that all information is included in the ELN output and thus consumable +# by the dataconverter +# taking the perspective of a specific lab where a specific version of an ELN provided by +# or running in addition to NOMAD OASIS is used, many pieces of information might not change +# or administrators do not wish to expose this via the end user ELN in an effort to reduce +# the complexity for end users and make entering of repetitive information obsolete + +# this is the scenario for which deployment_specific mapping shines +# parsing of deployment specific details in the apm reader is currently implemented +# such that it executes after reading generic ELN data; entries already available +# in the template get overwritten + +from pynxtools.dataconverter.readers.apm.utils.apm_versioning \ + import NX_APM_ADEF_NAME, NX_APM_ADEF_VERSION, NX_APM_EXEC_NAME, NX_APM_EXEC_VERSION + + +NxApmDeploymentSpecificInput \ + = {"/ENTRY[entry*]/@version": f"{NX_APM_ADEF_VERSION}", + "/ENTRY[entry*]/definition": f"{NX_APM_ADEF_NAME}", + "/ENTRY[entry*]/PROGRAM[program1]/program": f"{NX_APM_EXEC_NAME}", + "/ENTRY[entry*]/PROGRAM[program1]/program/@version": f"{NX_APM_EXEC_VERSION}", + "/ENTRY[entry*]/atom_probe/location": {"fun": "load_from", "terms": "location"}} diff --git a/pynxtools/dataconverter/readers/apm/map_concepts/apm_eln_to_nx_map.py b/pynxtools/dataconverter/readers/apm/map_concepts/apm_eln_to_nx_map.py new file mode 100644 index 000000000..76c763f47 --- /dev/null +++ b/pynxtools/dataconverter/readers/apm/map_concepts/apm_eln_to_nx_map.py @@ -0,0 +1,109 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+# +"""Dict mapping custom schema instances from eln_data.yaml file on concepts in NXapm.""" + +NxApmElnInput = {"IGNORE": {"fun": "load_from_dict_list", "terms": "em_lab/detector"}, + "IGNORE": {"fun": "load_from", "terms": "em_lab/ebeam_column/aberration_correction/applied"}, + "IGNORE": {"fun": "load_from_dict_list", "terms": "em_lab/ebeam_column/aperture_em"}, + "/ENTRY[entry*]/PROGRAM[program2]/program": {"fun": "load_from", "terms": "atom_probe/control_software_program"}, + "/ENTRY[entry*]/PROGRAM[program2]/program/@version": {"fun": "load_from", "terms": "atom_probe/control_software_program__attr_version"}, + "/ENTRY[entry*]/experiment_identifier": {"fun": "load_from", "terms": "entry/experiment_identifier"}, + "/ENTRY[entry*]/start_time": {"fun": "load_from", "terms": "entry/start_time"}, + "/ENTRY[entry*]/end_time": {"fun": "load_from", "terms": "entry/end_time"}, + "/ENTRY[entry*]/run_number": {"fun": "load_from", "terms": "entry/run_number"}, + "/ENTRY[entry*]/operation_mode": {"fun": "load_from", "terms": "entry/operation_mode"}, + "/ENTRY[entry*]/experiment_description": {"fun": "load_from", "terms": "entry/experiment_description"}, + "IGNORE": {"fun": "load_from", "terms": "sample/alias"}, + "/ENTRY[entry*]/sample/grain_diameter": {"fun": "load_from", "terms": "sample/grain_diameter/value"}, + "/ENTRY[entry*]/sample/grain_diameter/@units": {"fun": "load_from", "terms": "sample/grain_diameter/unit"}, + "/ENTRY[entry*]/sample/grain_diameter_error": {"fun": "load_from", "terms": "sample/grain_diameter_error/value"}, + "/ENTRY[entry*]/sample/grain_diameter_error/@units": {"fun": "load_from", "terms": "sample/grain_diameter_error/unit"}, + "/ENTRY[entry*]/sample/heat_treatment_quenching_rate": {"fun": "load_from", "terms": "sample/heat_treatment_quenching_rate/value"}, + "/ENTRY[entry*]/sample/heat_treatment_quenching_rate/@units": {"fun": "load_from", "terms": "sample/heat_treatment_quenching_rate/unit"}, + "/ENTRY[entry*]/sample/heat_treatment_quenching_rate_error": {"fun": "load_from", "terms": "sample/heat_treatment_quenching_rate_error/value"}, + "/ENTRY[entry*]/sample/heat_treatment_quenching_rate_error/@units": {"fun": "load_from", "terms": "sample/heat_treatment_quenching_rate_error/unit"}, + "/ENTRY[entry*]/sample/heat_treatment_temperature": {"fun": "load_from", "terms": "sample/heat_treatment_temperature/value"}, + "/ENTRY[entry*]/sample/heat_treatment_temperature/@units": {"fun": "load_from", "terms": "sample/heat_treatment_temperature/unit"}, + "/ENTRY[entry*]/sample/heat_treatment_temperature_error": {"fun": "load_from", "terms": "sample/heat_treatment_temperature_error/value"}, + "/ENTRY[entry*]/sample/heat_treatment_temperature_error/@units": {"fun": "load_from", "terms": "sample/heat_treatment_temperature_error/unit"}, + "/ENTRY[entry*]/specimen/name": {"fun": "load_from", "terms": "specimen/name"}, + "/ENTRY[entry*]/specimen/preparation_date": {"fun": "load_from", "terms": "specimen/preparation_date"}, + "IGNORE": {"fun": "load_from", "terms": "specimen/sample_history"}, + "/ENTRY[entry*]/specimen/alias": {"fun": "load_from", "terms": "specimen/alias"}, + "/ENTRY[entry*]/specimen/is_polycrystalline": {"fun": "load_from", "terms": "specimen/is_polycrystalline"}, + "/ENTRY[entry*]/specimen/description": {"fun": "load_from", "terms": "specimen/description"}, + "/ENTRY[entry*]/atom_probe/FABRICATION[fabrication]/identifier": {"fun": "load_from", "terms": "atom_probe/fabrication_identifier"}, + "/ENTRY[entry*]/atom_probe/FABRICATION[fabrication]/model": {"fun": 
"load_from", "terms": "atom_probe/fabrication_model"}, + "/ENTRY[entry*]/atom_probe/FABRICATION[fabrication]/vendor": {"fun": "load_from", "terms": "atom_probe/fabrication_vendor"}, + "/ENTRY[entry*]/atom_probe/analysis_chamber/pressure": {"fun": "load_from", "terms": "atom_probe/analysis_chamber_pressure/value"}, + "/ENTRY[entry*]/atom_probe/analysis_chamber/pressure/@units": {"fun": "load_from", "terms": "atom_probe/analysis_chamber_pressure/unit"}, + "/ENTRY[entry*]/atom_probe/control_software/PROGRAM[program1]/program": {"fun": "load_from", "terms": "atom_probe/control_software_program"}, + "/ENTRY[entry*]/atom_probe/control_software/PROGRAM[program1]/program/@version": {"fun": "load_from", "terms": "atom_probe/control_software_program__attr_version"}, + "/ENTRY[entry*]/atom_probe/field_of_view": {"fun": "load_from", "terms": "atom_probe/field_of_view/value"}, + "/ENTRY[entry*]/atom_probe/field_of_view/@units": {"fun": "load_from", "terms": "atom_probe/field_of_view/unit"}, + "/ENTRY[entry*]/atom_probe/flight_path_length": {"fun": "load_from", "terms": "atom_probe/flight_path_length/value"}, + "/ENTRY[entry*]/atom_probe/flight_path_length/@units": {"fun": "load_from", "terms": "atom_probe/flight_path_length/unit"}, + "/ENTRY[entry*]/atom_probe/instrument_name": {"fun": "load_from", "terms": "atom_probe/instrument_name"}, + "/ENTRY[entry*]/atom_probe/ion_detector/model": {"fun": "load_from", "terms": "atom_probe/ion_detector_model"}, + "/ENTRY[entry*]/atom_probe/ion_detector/name": {"fun": "load_from", "terms": "atom_probe/ion_detector_name"}, + "/ENTRY[entry*]/atom_probe/ion_detector/serial_number": {"fun": "load_from", "terms": "atom_probe/ion_detector_serial_number"}, + "/ENTRY[entry*]/atom_probe/ion_detector/type": {"fun": "load_from", "terms": "atom_probe/ion_detector_type"}, + "/ENTRY[entry*]/atom_probe/local_electrode/name": {"fun": "load_from", "terms": "atom_probe/local_electrode_name"}, + "/ENTRY[entry*]/atom_probe/location": {"fun": "load_from", "terms": "atom_probe/location"}, + "/ENTRY[entry*]/atom_probe/REFLECTRON[reflectron]/applied": {"fun": "load_from", "terms": "atom_probe/reflectron_applied"}, + "/ENTRY[entry*]/atom_probe/stage_lab/base_temperature": {"fun": "load_from", "terms": "atom_probe/stage_lab_base_temperature/value"}, + "/ENTRY[entry*]/atom_probe/stage_lab/base_temperature/@units": {"fun": "load_from", "terms": "atom_probe/stage_lab_base_temperature/unit"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/detection_rate": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_detection_rate/value"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/detection_rate/@units": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_detection_rate/unit"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/initial_radius": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_initial_radius/value"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/initial_radius/@units": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_initial_radius/unit"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/shank_angle": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_shank_angle/value"}, + "/ENTRY[entry*]/atom_probe/specimen_monitoring/shank_angle/@units": {"fun": "load_from", "terms": "atom_probe/specimen_monitoring_shank_angle/unit"}, + "/ENTRY[entry*]/atom_probe/status": {"fun": "load_from", "terms": "atom_probe/status"}, + "/ENTRY[entry*]/atom_probe/pulser/pulse_fraction": {"fun": "load_from", "terms": 
"atom_probe/pulser/pulse_fraction"}, + "/ENTRY[entry*]/atom_probe/pulser/pulse_frequency": {"fun": "load_from", "terms": "atom_probe/pulser/pulse_frequency/value"}, + "/ENTRY[entry*]/atom_probe/pulser/pulse_frequency/@units": {"fun": "load_from", "terms": "atom_probe/pulser/pulse_frequency/unit"}, + "/ENTRY[entry*]/atom_probe/pulser/pulse_mode": {"fun": "load_from", "terms": "atom_probe/pulser/pulse_mode"}, + "/ENTRY[entry*]/atom_probe/ranging/PROGRAM[program1]/program": {"fun": "load_from", "terms": "atom_probe/ranging/program"}, + "/ENTRY[entry*]/atom_probe/ranging/PROGRAM[program1]/program/@version": {"fun": "load_from", "terms": "atom_probe/ranging/program__attr_version"}, + "/ENTRY[entry*]/atom_probe/reconstruction/PROGRAM[program1]/program": {"fun": "load_from", "terms": "atom_probe/reconstruction/program"}, + "/ENTRY[entry*]/atom_probe/reconstruction/PROGRAM[program1]/program/@version": {"fun": "load_from", "terms": "atom_probe/reconstruction/program__attr_version"}, + "/ENTRY[entry*]/atom_probe/reconstruction/crystallographic_calibration": {"fun": "load_from", "terms": "atom_probe/reconstruction/crystallographic_calibration"}, + "/ENTRY[entry*]/atom_probe/reconstruction/parameter": {"fun": "load_from", "terms": "atom_probe/reconstruction/parameter"}, + "/ENTRY[entry*]/atom_probe/reconstruction/protocol_name": {"fun": "load_from", "terms": "atom_probe/reconstruction/protocol_name"}} + +# NeXus concept specific mapping tables which require special treatment as the current +# NOMAD OASIS custom schema implementation delivers them as a list of dictionaries instead +# of a directly flattenable list of keyword, value pairs + +NxUserFromListOfDict = {"/ENTRY[entry*]/USER[user*]/name": {"fun": "load_from", "terms": "name"}, + "/ENTRY[entry*]/USER[user*]/affiliation": {"fun": "load_from", "terms": "affiliation"}, + "/ENTRY[entry*]/USER[user*]/address": {"fun": "load_from", "terms": "address"}, + "/ENTRY[entry*]/USER[user*]/email": {"fun": "load_from", "terms": "email"}, + "/ENTRY[entry*]/USER[user*]/orcid": {"fun": "load_from", "terms": "orcid"}, + "/ENTRY[entry*]/USER[user*]/orcid_platform": {"fun": "load_from", "terms": "orcid_platform"}, + "/ENTRY[entry*]/USER[user*]/telephone_number": {"fun": "load_from", "terms": "telephone_number"}, + "/ENTRY[entry*]/USER[user*]/role": {"fun": "load_from", "terms": "role"}, + "/ENTRY[entry*]/USER[user*]/social_media_name": {"fun": "load_from", "terms": "social_media_name"}, + "/ENTRY[entry*]/USER[user*]/social_media_platform": {"fun": "load_from", "terms": "social_media_platform"}} + +# LEAP6000 can use up to two lasers and voltage pulsing (both at the same time?) 
+NxPulserFromListOfDict = {"/ENTRY[entry*]/atom_probe/pulser/SOURCE[source*]/name": {"fun": "load_from", "terms": "name"}, + "/ENTRY[entry*]/atom_probe/pulser/SOURCE[source*]/power": {"fun": "load_from", "terms": "power"}, + "/ENTRY[entry*]/atom_probe/pulser/SOURCE[source*]/pulse_energy": {"fun": "load_from", "terms": "pulse_energy"}, + "/ENTRY[entry*]/atom_probe/pulser/SOURCE[source*]/wavelength": {"fun": "load_from", "terms": "wavelength"}} diff --git a/pynxtools/dataconverter/readers/apm/reader.py b/pynxtools/dataconverter/readers/apm/reader.py index 651100fd1..2e946257f 100644 --- a/pynxtools/dataconverter/readers/apm/reader.py +++ b/pynxtools/dataconverter/readers/apm/reader.py @@ -23,22 +23,25 @@ from pynxtools.dataconverter.readers.base.reader import BaseReader -from pynxtools.dataconverter.readers.apm.utils.apm_use_case_selector \ +from pynxtools.dataconverter.readers.apm.utils.apm_define_io_cases \ import ApmUseCaseSelector -from pynxtools.dataconverter.readers.apm.utils.apm_generic_eln_io \ +from pynxtools.dataconverter.readers.apm.utils.apm_load_deployment_specifics \ + import NxApmNomadOasisConfigurationParser + +from pynxtools.dataconverter.readers.apm.utils.apm_load_generic_eln \ import NxApmNomadOasisElnSchemaParser -from pynxtools.dataconverter.readers.apm.utils.apm_reconstruction_io \ +from pynxtools.dataconverter.readers.apm.utils.apm_load_reconstruction \ import ApmReconstructionParser -from pynxtools.dataconverter.readers.apm.utils.apm_ranging_io \ +from pynxtools.dataconverter.readers.apm.utils.apm_load_ranging \ import ApmRangingDefinitionsParser -from pynxtools.dataconverter.readers.apm.utils.apm_nexus_plots \ +from pynxtools.dataconverter.readers.apm.utils.apm_create_nx_default_plots \ import apm_default_plot_generator -from pynxtools.dataconverter.readers.apm.utils.apm_example_data \ +from pynxtools.dataconverter.readers.apm.utils.apm_generate_synthetic_data \ import ApmCreateExampleData # this apm parser combines multiple sub-parsers @@ -103,6 +106,12 @@ def read(self, print("No input file defined for eln data !") return {} + print("Parse (meta)data coming from a configuration that specific OASIS...") + if len(case.cfg) == 1: + nx_apm_cfg = NxApmNomadOasisConfigurationParser(case.cfg[0], entry_id) + nx_apm_cfg.report(template) + # having and or using a deployment-specific configuration is optional + print("Parse (numerical) data and metadata from ranging definitions file...") if len(case.reconstruction) == 1: nx_apm_recon = ApmReconstructionParser(case.reconstruction[0], entry_id) @@ -120,13 +129,10 @@ def read(self, print("Create NeXus default plottable data...") apm_default_plot_generator(template, n_entries) - debugging = False - if debugging is True: - print("Reporting state of template before passing to HDF5 writing...") - for keyword in template.keys(): - print(keyword) - # print(type(template[keyword])) - # print(template[keyword]) + # print("Reporting state of template before passing to HDF5 writing...") + # for keyword in template.keys(): + # print(keyword) + # print(template[keyword]) print("Forward instantiated template to the NXS writer...") return template diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_nexus_plots.py b/pynxtools/dataconverter/readers/apm/utils/apm_create_nx_default_plots.py similarity index 100% rename from pynxtools/dataconverter/readers/apm/utils/apm_nexus_plots.py rename to pynxtools/dataconverter/readers/apm/utils/apm_create_nx_default_plots.py diff --git 
a/pynxtools/dataconverter/readers/apm/utils/apm_use_case_selector.py b/pynxtools/dataconverter/readers/apm/utils/apm_define_io_cases.py similarity index 65% rename from pynxtools/dataconverter/readers/apm/utils/apm_use_case_selector.py rename to pynxtools/dataconverter/readers/apm/utils/apm_define_io_cases.py index 2819281ba..26a73a1e9 100644 --- a/pynxtools/dataconverter/readers/apm/utils/apm_use_case_selector.py +++ b/pynxtools/dataconverter/readers/apm/utils/apm_define_io_cases.py @@ -36,11 +36,21 @@ def __init__(self, file_paths: Tuple[str] = None): eln injects additional metadata and eventually numerical data. """ self.case: Dict[str, list] = {} + self.eln: List[str] = [] + self.cfg: List[str] = [] + self.reconstruction: List[str] = [] + self.ranging: List[str] = [] self.is_valid = False self.supported_mime_types = [ "pos", "epos", "apt", "rrng", "rng", "txt", "yaml", "yml"] for mime_type in self.supported_mime_types: self.case[mime_type] = [] + + self.sort_files_by_mime_type(file_paths) + self.check_validity_of_file_combinations() + + def sort_files_by_mime_type(self, file_paths: Tuple[str] = None): + """Sort all input-files based on their mimetype to prepare validity check.""" for file_name in file_paths: index = file_name.lower().rfind(".") if index >= 0: @@ -48,15 +58,23 @@ def __init__(self, file_paths: Tuple[str] = None): if suffix in self.supported_mime_types: if file_name not in self.case[suffix]: self.case[suffix].append(file_name) - recon_input = 0 - range_input = 0 + + def check_validity_of_file_combinations(self): + """Check if this combination of types of files is supported.""" + recon_input = 0 # reconstruction relevant file e.g. POS, ePOS, APT + range_input = 0 # ranging definition file, e.g. RNG, RRNG + other_input = 0 # generic ELN or OASIS-specific configurations for mime_type, value in self.case.items(): if mime_type in ["pos", "epos", "apt"]: recon_input += len(value) - if mime_type in ["rrng", "rng", "txt"]: + elif mime_type in ["rrng", "rng", "txt"]: range_input += len(value) - eln_input = len(self.case["yaml"]) + len(self.case["yml"]) - if (recon_input == 1) and (range_input == 1) and (eln_input == 1): + elif mime_type in ["yaml", "yml"]: + other_input += len(value) + else: + continue + + if (recon_input == 1) and (range_input == 1) and (1 <= other_input <= 2): self.is_valid = True self.reconstruction: List[str] = [] self.ranging: List[str] = [] @@ -64,6 +82,12 @@ def __init__(self, file_paths: Tuple[str] = None): self.reconstruction += self.case[mime_type] for mime_type in ["rrng", "rng", "txt"]: self.ranging += self.case[mime_type] - self.eln: List[str] = [] + yml: List[str] = [] for mime_type in ["yaml", "yml"]: - self.eln += self.case[mime_type] + yml += self.case[mime_type] + for entry in yml: + if entry.endswith(".oasis.specific.yaml") \ + or entry.endswith(".oasis.specific.yml"): + self.cfg += [entry] + else: + self.eln += [entry] diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_example_data.py b/pynxtools/dataconverter/readers/apm/utils/apm_generate_synthetic_data.py similarity index 99% rename from pynxtools/dataconverter/readers/apm/utils/apm_example_data.py rename to pynxtools/dataconverter/readers/apm/utils/apm_generate_synthetic_data.py index 47c63f8f3..c34d30f7b 100644 --- a/pynxtools/dataconverter/readers/apm/utils/apm_example_data.py +++ b/pynxtools/dataconverter/readers/apm/utils/apm_generate_synthetic_data.py @@ -45,7 +45,7 @@ from pynxtools.dataconverter.readers.apm.utils.apm_versioning \ import NX_APM_ADEF_NAME, NX_APM_ADEF_VERSION, 
NX_APM_EXEC_NAME, NX_APM_EXEC_VERSION -from pynxtools.dataconverter.readers.apm.utils.apm_ranging_io \ +from pynxtools.dataconverter.readers.apm.utils.apm_load_ranging \ import add_unknown_iontype diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py b/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py deleted file mode 100644 index 41677a1eb..000000000 --- a/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py +++ /dev/null @@ -1,409 +0,0 @@ -# -# Copyright The NOMAD Authors. -# -# This file is part of NOMAD. See https://nomad-lab.eu for further info. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -"""Wrapping multiple parsers for vendor files with NOMAD OASIS/ELN/YAML metadata.""" - -# pylint: disable=no-member - -import flatdict as fd - -import numpy as np - -import yaml - -from ase.data import chemical_symbols - -from pynxtools.dataconverter.readers.apm.utils.apm_versioning \ - import NX_APM_ADEF_NAME, NX_APM_ADEF_VERSION, NX_APM_EXEC_NAME, NX_APM_EXEC_VERSION - - -class NxApmNomadOasisElnSchemaParser: # pylint: disable=too-few-public-methods - """Parse eln_data.yaml dump file content generated from a NOMAD OASIS YAML. - - This parser implements a design where an instance of a specific NOMAD - custom schema ELN template is used to fill pieces of information which - are typically not contained in files from technology partners - (e.g. pos, epos, apt, rng, rrng, ...). Until now, this custom schema and - the NXapm application definition do not use a fully harmonized vocabulary. - Therefore, the here hardcoded implementation is needed which maps specifically - named pieces of information from the custom schema instance on named fields - in an instance of NXapm - - The functionalities in this ELN YAML parser do not check if the - instantiated template yields an instance which is compliant NXapm. - Instead, this task is handled by the generic part of the dataconverter - during the verification of the template dictionary. 
- """ - - def __init__(self, file_name: str, entry_id: int): - print(f"Extracting data from ELN file: {file_name}") - if (file_name.rsplit('/', 1)[-1].startswith("eln_data") - or file_name.startswith("eln_data")) and entry_id > 0: - self.entry_id = entry_id - self.file_name = file_name - with open(self.file_name, "r", encoding="utf-8") as stream: - self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter=":") - else: - self.entry_id = 1 - self.file_name = "" - self.yml = {} - - def parse_entry(self, template: dict) -> dict: - """Copy data in entry section.""" - # print("Parsing entry...") - trg = f"/ENTRY[entry{self.entry_id}]/" - src = "entry" - if isinstance(self.yml[src], fd.FlatDict): - if (self.yml[f"{src}:attr_version"] == NX_APM_ADEF_VERSION) \ - and (self.yml[f"{src}:definition"] == NX_APM_ADEF_NAME): - template[f"{trg}@version"] = NX_APM_ADEF_VERSION - template[f"{trg}definition"] = NX_APM_ADEF_NAME - template[f"{trg}PROGRAM[program1]/program"] = NX_APM_EXEC_NAME - template[f"{trg}PROGRAM[program1]/program/@version"] = NX_APM_EXEC_VERSION - if ("program" in self.yml[src].keys()) \ - and ("program__attr_version" in self.yml[src].keys()): - template[f"{trg}PROGRAM[program2]/program"] \ - = self.yml[f"{src}:program"] - template[f"{trg}PROGRAM[program2]/program/@version"] \ - = self.yml[f"{src}:program__attr_version"] - - required_field_names = ["experiment_identifier", "run_number", - "operation_mode"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - optional_field_names = ["start_time", "end_time", - "experiment_description", "experiment_documentation"] - for field_name in optional_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_user(self, template: dict) -> dict: - """Copy data in user section.""" - # print("Parsing user...") - src = "user" - if "user" in self.yml.keys(): - if len(self.yml[src]) >= 1: - user_id = 1 - for user_list in self.yml[src]: - trg = f"/ENTRY[entry{self.entry_id}]/USER[user{user_id}]/" - - required_field_names = ["name"] - for field_name in required_field_names: - if field_name in user_list.keys(): - template[f"{trg}{field_name}"] = user_list[field_name] - - optional_field_names = ["email", "affiliation", "address", - "orcid", "orcid_platform", - "telephone_number", "role", - "social_media_name", "social_media_platform"] - for field_name in optional_field_names: - if field_name in user_list.keys(): - template[f"{trg}{field_name}"] = user_list[field_name] - user_id += 1 - - return template - - def parse_specimen(self, template: dict) -> dict: - """Copy data in specimen section.""" - # print("Parsing sample...") - src = "specimen" - trg = f"/ENTRY[entry{self.entry_id}]/specimen/" - if isinstance(self.yml[src], fd.FlatDict): - if (isinstance(self.yml[f"{src}:atom_types"], list)) \ - and (len(self.yml[src + ":atom_types"]) >= 1): - atom_types_are_valid = True - for symbol in self.yml[f"{src}:atom_types"]: - valid = isinstance(symbol, str) \ - and (symbol in chemical_symbols) and (symbol != "X") - if valid is False: - atom_types_are_valid = False - break - if atom_types_are_valid is True: - template[f"{trg}atom_types"] \ - = ", ".join(list(self.yml[f"{src}:atom_types"])) - - required_field_names = ["name", "sample_history", "preparation_date"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - 
template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - optional_field_names = ["short_title", "description"] - for field_name in optional_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_instrument_header(self, template: dict) -> dict: - """Copy data in instrument_header section.""" - # print("Parsing instrument header...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/" - if isinstance(self.yml[src], fd.FlatDict): - required_field_names = ["instrument_name", "status"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - optional_field_names = ["location"] - for field_name in optional_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - float_field_names = ["flight_path_length", "field_of_view"] - for field_name in float_field_names: - if (f"{field_name}:value" in self.yml[src].keys()) \ - and (f"{field_name}:unit" in self.yml[src].keys()): - template[f"{trg}{field_name}"] \ - = np.float64(self.yml[f"{src}:{field_name}:value"]) - template[f"{trg}{field_name}/@units"] \ - = self.yml[f"{src}:{field_name}:unit"] - - return template - - def parse_fabrication(self, template: dict) -> dict: - """Copy data in fabrication section.""" - # print("Parsing fabrication...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/FABRICATION[fabrication]/" - required_field_names = ["fabrication_vendor", "fabrication_model"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - suffix = field_name.replace("fabrication_", "") - template[f"{trg}{suffix}"] = self.yml[f"{src}:{field_name}"] - - optional_field_names = ["fabrication_identifier", "fabrication_capabilities"] - for field_name in optional_field_names: - if field_name in self.yml[src].keys(): - suffix = field_name.replace("fabrication_", "") - template[f"{trg}{suffix}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_analysis_chamber(self, template: dict) -> dict: - """Copy data in analysis_chamber section.""" - # print("Parsing analysis chamber...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/analysis_chamber/" - float_field_names = ["analysis_chamber_pressure"] - for field_name in float_field_names: - if (f"{field_name}:value" in self.yml[src].keys()) \ - and (f"{field_name}:unit" in self.yml[src].keys()): - suffix = field_name.replace("analysis_chamber_", "") - template[f"{trg}{suffix}"] \ - = np.float64(self.yml[f"{src}:{field_name}:value"]) - template[f"{trg}{suffix}/@units"] = self.yml[f"{src}:{field_name}:unit"] - - return template - - def parse_reflectron(self, template: dict) -> dict: - """Copy data in reflectron section.""" - # print("Parsing reflectron...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/REFLECTRON[reflectron]/" - required_field_names = ["reflectron_applied"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - suffix = field_name.replace("reflectron_", "") - template[f"{trg}{suffix}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_local_electrode(self, template: dict) -> dict: - """Copy data in local_electrode section.""" - # print("Parsing local electrode...") - src = "atom_probe" - trg = 
f"/ENTRY[entry{self.entry_id}]/atom_probe/local_electrode/" - required_field_names = ["local_electrode_name"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - suffix = field_name.replace("local_electrode_", "") - template[f"{trg}{suffix}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_detector(self, template: dict) -> dict: - """Copy data in ion_detector section.""" - # print("Parsing detector...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/ion_detector/" - required_field_names = ["ion_detector_type", "ion_detector_name", - "ion_detector_model", "ion_detector_serial_number"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - suffix = field_name.replace("ion_detector_", "") - template[f"{trg}{suffix}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_stage_lab(self, template: dict) -> dict: - """Copy data in stage lab section.""" - # print("Parsing stage_lab...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/stage_lab/" - if isinstance(self.yml[src], fd.FlatDict): - required_value_fields = ["stage_lab_base_temperature"] - for field_name in required_value_fields: - if (f"{field_name}:value" in self.yml[src].keys()) \ - and (f"{field_name}:unit" in self.yml[src].keys()): - suffix = field_name.replace("stage_lab_", "") - template[f"{trg}{suffix}"] \ - = np.float64(self.yml[f"{src}:{field_name}:value"]) - template[f"{trg}{suffix}/@units"] \ - = self.yml[f"{src}:{field_name}:unit"] - - return template - - def parse_specimen_monitoring(self, template: dict) -> dict: - """Copy data in specimen_monitoring section.""" - # print("Parsing specimen_monitoring...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/specimen_monitoring/" - if isinstance(self.yml[src], fd.FlatDict): - required_field_names = ["specimen_monitoring_detection_rate"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}detection_rate"] \ - = np.float64(self.yml[f"{src}:{field_name}"]) - float_field_names = ["specimen_monitoring_initial_radius", - "specimen_monitoring_shank_angle"] - for float_field_name in float_field_names: - if (f"{float_field_name}:value" in self.yml[src].keys()) \ - and (f"{float_field_name}:unit" in self.yml[src].keys()): - suffix = float_field_name.replace("specimen_monitoring_", "") - template[f"{trg}{suffix}"] \ - = np.float64(self.yml[f"{src}:{float_field_name}:value"]) - template[f"{trg}{suffix}/@units"] \ - = self.yml[f"{src}:{float_field_name}:unit"] - - return template - - def parse_control_software(self, template: dict) -> dict: - """Copy data in control software section.""" - # print("Parsing control software...") - src = "atom_probe" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/control_software/" - if isinstance(self.yml[src], fd.FlatDict): - prefix = "control_software" - if (f"{prefix}_program" in self.yml[src].keys()) \ - and (f"{prefix}_program__attr_version" in self.yml[src].keys()): - template[f"{trg}PROGRAM[program1]/program"] \ - = self.yml[f"{src}:{prefix}_program"] - template[f"{trg}PROGRAM[program1]/program/@version"] \ - = self.yml[f"{src}:{prefix}_program__attr_version"] - - return template - - def parse_pulser(self, template: dict) -> dict: - """Copy data in pulser section.""" - # print("Parsing pulser...") - src = "atom_probe:pulser" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/pulser/" - if isinstance(self.yml[src], fd.FlatDict): - if 
"pulse_mode" in self.yml[src].keys(): - pulse_mode = self.yml[f"{src}:pulse_mode"] - template[f"{trg}pulse_mode"] = pulse_mode - else: # can not parse selectively as pulse_mode was not documented - return template - - if "pulse_fraction" in self.yml[src].keys(): - template[f"{trg}pulse_fraction"] \ - = np.float64(self.yml[f"{src}:pulse_fraction"]) - - float_field_names = ["pulse_frequency"] - for field_name in float_field_names: - if (f"{field_name}:value" in self.yml[src].keys()) \ - and (f"{field_name}:unit" in self.yml[src].keys()): - template[f"{trg}{field_name}"] \ - = np.float64(self.yml[f"{src}:{field_name}:value"]) - template[f"{trg}{field_name}/@units"] \ - = self.yml[f"{src}:{field_name}:unit"] - # additionally required data for laser and laser_and_voltage runs - if pulse_mode != "voltage": - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/" \ - f"pulser/SOURCE[laser_source1]/" - if "laser_source_name" in self.yml[src].keys(): - template[f"{trg}name"] = self.yml[f"{src}:laser_source_name"] - - float_field_names = ["laser_source_wavelength", - "laser_source_power", - "laser_source_pulse_energy"] - for field_name in float_field_names: - if (f"{field_name}:value" in self.yml[src].keys()) \ - and (f"{field_name}:unit" in self.yml[src].keys()): - suffix = field_name.replace("laser_source_", "") - template[f"{trg}{suffix}"] \ - = np.float64(self.yml[f"{src}:{field_name}:value"]) - template[f"{trg}{suffix}/@units"] \ - = self.yml[f"{src}:{field_name}:unit"] - - return template - - def parse_reconstruction(self, template: dict) -> dict: - """Copy data in reconstruction section.""" - # print("Parsing reconstruction...") - src = "reconstruction" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/reconstruction/" - if ("program" in self.yml[src].keys()) \ - and ("program__attr_version" in self.yml[src].keys()): - template[f"{trg}PROGRAM[program1]/program"] \ - = self.yml[f"{src}:program"] - template[f"{trg}PROGRAM[program1]/program/@version"] \ - = self.yml[f"{src}:program__attr_version"] - - required_field_names = ["protocol_name", "parameter", - "crystallographic_calibration"] - for field_name in required_field_names: - if field_name in self.yml[src].keys(): - template[f"{trg}{field_name}"] = self.yml[f"{src}:{field_name}"] - - return template - - def parse_ranging(self, template: dict) -> dict: - """Copy data in ranging section.""" - # print("Parsing ranging...") - src = "ranging" - trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/ranging/" - if ("program" in self.yml[src].keys()) \ - and ("program__attr_version" in self.yml[src].keys()): - template[f"{trg}PROGRAM[program1]/program"] = self.yml[f"{src}:program"] - template[f"{trg}PROGRAM[program1]/program/@version"] \ - = self.yml[f"{src}:program__attr_version"] - - return template - - def report(self, template: dict) -> dict: - """Copy data from self into template the appdef instance.""" - self.parse_entry(template) - self.parse_user(template) - self.parse_specimen(template) - self.parse_instrument_header(template) - self.parse_fabrication(template) - self.parse_analysis_chamber(template) - self.parse_reflectron(template) - self.parse_local_electrode(template) - self.parse_detector(template) - self.parse_stage_lab(template) - self.parse_specimen_monitoring(template) - self.parse_control_software(template) - self.parse_pulser(template) - self.parse_reconstruction(template) - self.parse_ranging(template) - return template diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_load_deployment_specifics.py 
b/pynxtools/dataconverter/readers/apm/utils/apm_load_deployment_specifics.py new file mode 100644 index 000000000..87dc05950 --- /dev/null +++ b/pynxtools/dataconverter/readers/apm/utils/apm_load_deployment_specifics.py @@ -0,0 +1,57 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Load deployment-specific quantities.""" + +# pylint: disable=no-member + +import flatdict as fd + +import yaml + +from pynxtools.dataconverter.readers.apm.map_concepts.apm_deployment_specifics_to_nx_map \ + import NxApmDeploymentSpecificInput + +from pynxtools.dataconverter.readers.shared.map_concepts.mapping_functors \ + import apply_modifier, variadic_path_to_specific_path + + +class NxApmNomadOasisConfigurationParser: # pylint: disable=too-few-public-methods + """Parse deployment specific configuration.""" + + def __init__(self, file_name: str, entry_id: int): + print(f"Extracting data from deployment specific configuration file: {file_name}") + if (file_name.rsplit('/', 1)[-1].endswith(".oasis.specific.yaml") + or file_name.endswith(".oasis.specific.yml")) and entry_id > 0: + self.entry_id = entry_id + self.file_name = file_name + with open(self.file_name, "r", encoding="utf-8") as stream: + self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter="/") + else: + self.entry_id = 1 + self.file_name = "" + self.yml = {} + + def report(self, template: dict) -> dict: + """Copy data from configuration applying mapping functors.""" + for nx_path, modifier in NxApmDeploymentSpecificInput.items(): + if nx_path not in ("IGNORE", "UNCLEAR"): + trg = variadic_path_to_specific_path(nx_path, [self.entry_id, 1]) + res = apply_modifier(modifier, self.yml) + if res is not None: + template[trg] = res + return template diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_load_generic_eln.py b/pynxtools/dataconverter/readers/apm/utils/apm_load_generic_eln.py new file mode 100644 index 000000000..ed36eec23 --- /dev/null +++ b/pynxtools/dataconverter/readers/apm/utils/apm_load_generic_eln.py @@ -0,0 +1,175 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Wrapping multiple parsers for vendor files with NOMAD OASIS/ELN/YAML metadata.""" + +# pylint: disable=no-member,duplicate-code,too-many-nested-blocks + +import flatdict as fd + +import yaml + +from ase.data import chemical_symbols + +from pynxtools.dataconverter.readers.apm.map_concepts.apm_eln_to_nx_map \ + import NxApmElnInput, NxUserFromListOfDict + +from pynxtools.dataconverter.readers.shared.map_concepts.mapping_functors \ + import variadic_path_to_specific_path, apply_modifier + +from pynxtools.dataconverter.readers.apm.utils.apm_parse_composition_table \ + import parse_composition_table + + +class NxApmNomadOasisElnSchemaParser: # pylint: disable=too-few-public-methods + """Parse eln_data.yaml dump file content generated from a NOMAD OASIS YAML. + + This parser implements a design where an instance of a specific NOMAD + custom schema ELN template is used to fill pieces of information which + are typically not contained in files from technology partners + (e.g. pos, epos, apt, rng, rrng, ...). Until now, this custom schema and + the NXapm application definition do not use a fully harmonized vocabulary. + Therefore, the here hardcoded implementation is needed which maps specifically + named pieces of information from the custom schema instance on named fields + in an instance of NXapm + + The functionalities in this ELN YAML parser do not check if the + instantiated template yields an instance which is compliant NXapm. + Instead, this task is handled by the generic part of the dataconverter + during the verification of the template dictionary. + """ + + def __init__(self, file_name: str, entry_id: int): + print(f"Extracting data from ELN file: {file_name}") + if (file_name.rsplit('/', 1)[-1].startswith("eln_data") + or file_name.startswith("eln_data")) and entry_id > 0: + self.entry_id = entry_id + self.file_name = file_name + with open(self.file_name, "r", encoding="utf-8") as stream: + self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter="/") + else: + self.entry_id = 1 + self.file_name = "" + self.yml = {} + + def parse_sample_composition(self, template: dict) -> dict: + """Interpret human-readable ELN input to generate consistent composition table.""" + src = "sample/composition" + if src in self.yml.keys(): + if isinstance(self.yml[src], list): + dct = parse_composition_table(self.yml[src]) + + prfx = f"/ENTRY[entry{self.entry_id}]/sample/" \ + f"CHEMICAL_COMPOSITION[chemical_composition]" + unit = "at.-%" # the assumed default unit + if "normalization" in dct: + if dct["normalization"] in ["%", "at%", "at-%", "at.-%", "ppm", "ppb"]: + unit = "at.-%" + template[f"{prfx}/normalization"] = "atom_percent" + elif dct["normalization"] in ["wt%", "wt-%", "wt.-%"]: + unit = "wt.-%" + template[f"{prfx}/normalization"] = "weight_percent" + else: + return template + ion_id = 1 + for symbol in chemical_symbols[1::]: + # ase convention, chemical_symbols[0] == "X" + # to use ordinal number for indexing + if symbol in dct: + if isinstance(dct[symbol], tuple) and len(dct[symbol]) == 2: + trg = f"{prfx}/ION[ion{ion_id}]" + template[f"{trg}/name"] = symbol + template[f"{trg}/composition"] = dct[symbol][0] + template[f"{trg}/composition/@units"] = unit + if dct[symbol][1] is not None: + template[f"{trg}/composition_error"] = dct[symbol][1] + template[f"{trg}/composition_error/@units"] = unit + ion_id += 1 + return template + + def parse_user_section(self, template: dict) -> dict: + """Copy data from user section into template.""" + src = "user" + if src in self.yml.keys(): + if 
isinstance(self.yml[src], list): + if all(isinstance(entry, dict) for entry in self.yml[src]) is True: + user_id = 1 + # custom schema delivers a list of dictionaries... + for user_dict in self.yml[src]: + # ... for each of them inspect for fields mappable on NeXus + identifier = [self.entry_id, user_id] + # identifier to get instance NeXus path from variadic NeXus path + # try to find all quantities on the left-hand side of the mapping + # table and check if we can find these + for nx_path, modifier in NxUserFromListOfDict.items(): + if nx_path not in ("IGNORE", "UNCLEAR"): + trg = variadic_path_to_specific_path(nx_path, identifier) + res = apply_modifier(modifier, user_dict) + if res is not None: + template[trg] = res + user_id += 1 + return template + + def parse_laser_pulser_details(self, template: dict) -> dict: + """Copy data in pulser section.""" + # additional laser-specific details only relevant when the laser was used + src = "atom_probe/pulser/pulse_mode" + if src in self.yml.keys(): + if self.yml[src] == "voltage": + return template + else: + return template + src = "atom_probe/pulser/laser_source" + if src in self.yml.keys(): + if isinstance(self.yml[src], list): + if all(isinstance(entry, dict) for entry in self.yml[src]) is True: + laser_id = 1 + # custom schema delivers a list of dictionaries... + trg = f"/ENTRY[entry{self.entry_id}]/atom_probe/pulser" \ + f"/SOURCE[source{laser_id}]" + for laser_dict in self.yml[src]: + if "name" in laser_dict.keys(): + template[f"{trg}/name"] = laser_dict["name"] + quantities = ["power", "pulse_energy", "wavelength"] + for quant in quantities: + if isinstance(laser_dict[quant], dict): + if ("value" in laser_dict[quant].keys()) \ + and ("unit" in laser_dict[quant].keys()): + template[f"{trg}/{quant}"] \ + = laser_dict[quant]["value"] + template[f"{trg}/{quant}/@units"] \ + = laser_dict[quant]["unit"] + laser_id += 1 + return template + + def parse_other_sections(self, template: dict) -> dict: + """Copy data from custom schema into template.""" + for nx_path, modifier in NxApmElnInput.items(): + if nx_path not in ("IGNORE", "UNCLEAR"): + trg = variadic_path_to_specific_path(nx_path, [self.entry_id, 1]) + res = apply_modifier(modifier, self.yml) + if res is not None: + template[trg] = res + return template + + def report(self, template: dict) -> dict: + """Copy data from self into template the appdef instance.""" + self.parse_sample_composition(template) + self.parse_user_section(template) + self.parse_laser_pulser_details(template) + self.parse_other_sections(template) + return template diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_ranging_io.py b/pynxtools/dataconverter/readers/apm/utils/apm_load_ranging.py similarity index 100% rename from pynxtools/dataconverter/readers/apm/utils/apm_ranging_io.py rename to pynxtools/dataconverter/readers/apm/utils/apm_load_ranging.py diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_reconstruction_io.py b/pynxtools/dataconverter/readers/apm/utils/apm_load_reconstruction.py similarity index 100% rename from pynxtools/dataconverter/readers/apm/utils/apm_reconstruction_io.py rename to pynxtools/dataconverter/readers/apm/utils/apm_load_reconstruction.py diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_parse_composition_table.py b/pynxtools/dataconverter/readers/apm/utils/apm_parse_composition_table.py new file mode 100644 index 000000000..cf8f2bc56 --- /dev/null +++ b/pynxtools/dataconverter/readers/apm/utils/apm_parse_composition_table.py @@ -0,0 +1,179 @@ +# +# Copyright 
The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Parse human-readable composition infos from set of ELN string text fields.""" + +# pylint: disable=no-member,too-many-branches + +import re + +import numpy as np + +from ase.data import chemical_symbols + + +def parse_human_readable_composition_case_one(symbol): + """Handle specification of matrix or remainder element.""" + return ("define_matrix", symbol, None, None, None) + + +def parse_human_readable_composition_case_two(args, symbol): + """Handle case element and at.-% composition, no comp. stdev.""" + if args[1] in ["rem", "remainder", "matrix"]: + return ("define_matrix", symbol, None, None, None) + composition = re.match(r"[-+]?(?:\d*\.*\d+)", args[1]) + if composition is not None: + fraction = np.float64(composition[0]) + return ("add_element", symbol, fraction, None, "at.-%") + return (None, None, None, None, None) + + +def parse_human_readable_composition_case_three(human_input, args, symbol): + """Handle case element with different than default normalization, no comp. stdev.""" + composition = re.findall(r"[-+]?(?:\d*\.*\d+)", human_input) + if len(composition) == 1: + fraction = np.float64(composition[0]) + normalization = args[2] + if normalization in ["%", "at%", "at-%", "at.-%"]: + return ("add_element", symbol, fraction, None, "at.-%") + if normalization in ["wt%", "wt-%", "wt.-%"]: + return ("add_element", symbol, fraction, None, "wt.-%") + if normalization == "ppm": + return ("add_element", symbol, fraction / 1.0e4, None, "at.-%") + if normalization == "ppb": + return ("add_element", symbol, fraction / 1.0e7, None, "at.-%") + return (None, None, None, None, None) + + +def parse_human_readable_composition_case_four(human_input, symbol): + """Handle case at.-% normalization with comp. stdev.""" + composition = re.findall(r"[-+]?(?:\d*\.*\d+)", human_input) + composition_error = human_input.count("+-") + if (len(composition) == 2) and (composition_error == 1): + fraction = np.float64(composition[0]) + error = np.float64(composition[1]) + return ("add_element", symbol, fraction, error, "at.-%") + return (None, None, None, None, None) + + +def parse_human_readable_composition_case_five(human_input, args, symbol): + """Handle case with different than standard normalization and comp. 
stdev.""" + composition = re.findall(r"[-+]?(?:\d*\.*\d+)", human_input) + if (len(composition) == 2) and (human_input.count("+-") == 1): + fraction = np.float64(composition[0]) + error = np.float64(composition[1]) + normalization = args[2] + if normalization in ["%", "at%", "at-%", "at.-%"]: + return ("add_element", symbol, fraction, error, "at.-%") + if normalization in ["wt%", "wt-%", "wt.-%"]: + return ("add_element", symbol, fraction, error, "wt.-%") + if normalization == "ppm": + return ("add_element", symbol, fraction / 1.0e4, error / 1.0e4, "at.-%") + if normalization == "ppb": + return ("add_element", symbol, fraction / 1.0e7, error / 1.0e7, "at.-%") + return (None, None, None, None, None) + + +def parse_human_readable_composition_information(eln_input): + """Identify instruction to parse from eln_input to define composition table.""" + args = eln_input.split(" ") + if len(args) >= 1: + element_symbol = args[0] + # composition value argument fraction is always expected in percent + # i.e. human should have written 98 instead 0.98! + if (element_symbol != "X") and (element_symbol in chemical_symbols): + # case: "Mo" + if len(args) == 1: + return parse_human_readable_composition_case_one( + element_symbol) + # case: "Mo matrix" or "Mo 98.0", always assuming at.-%! + if len(args) == 2: + return parse_human_readable_composition_case_two( + args, element_symbol) + # case: "Mo 98 wt.-%", selectable at.-%, ppm, ppb, or wt.-%! + if len(args) == 3: + return parse_human_readable_composition_case_three( + eln_input, args, element_symbol) + # case: "Mo 98 +- 2", always assuming at.-%! + if len(args) == 4: + return parse_human_readable_composition_case_four( + eln_input, element_symbol) + # case: "Mo 98 wt.-% +- 2", selectable at.-%, ppm, ppb, or wt.-%! 
+ if len(args) == 5: + return parse_human_readable_composition_case_five( + eln_input, args, element_symbol) + return (None, None, None, None, None) + + +def parse_composition_table(composition_list): + """Check if all the entries in the composition list yield a valid composition table.""" + composition_table = {} + # check that there are no contradictions or inconsistencies + for entry in composition_list: + instruction, element, composition, stdev, normalization \ + = parse_human_readable_composition_information(entry) + # print(f"{instruction}, {element}, {composition}, {stdev}, {normalization}") + + if instruction == "add_element": + if "normalization" not in composition_table: + if normalization is not None: + composition_table["normalization"] = normalization + else: + # as the normalization model is already defined, all following statements + # need to comply because we assume we are not allowed to mix atom and weight + # percent normalization in a composition_table + if normalization is not None: + if normalization != composition_table["normalization"]: + raise ValueError("Composition list is contradictory as it \ + mixes atom- with weight-percent normalization!") + + if element not in composition_table: + composition_table[element] = (composition, stdev) + else: + raise ValueError("Composition list is incorrectly formatted as it has \ + multiple lines for the same element!") + continue + if instruction == "define_matrix": + if element not in composition_table: + composition_table[element] = (None, None) + # because the fraction is unclear at this point + else: + raise ValueError("Composition list is contradictory as it includes \ + at least two statements about what the matrix should be!") + + # determine remaining fraction + total_fractions = 0. + remainder_element = None + for keyword, tpl in composition_table.items(): + if keyword != "normalization": + if (tpl is not None) and (tpl != (None, None)): + total_fractions += tpl[0] + else: + remainder_element = keyword + # print(f"Total fractions {total_fractions}, remainder element {remainder_element}") + if remainder_element is None: + raise ValueError("Composition list is inconsistent because either the fractions for \ + elements do not add up to 100.
or no symbol for matrix defined!") + + if composition_table: # means != {} + composition_table[remainder_element] = (1.0e2 - total_fractions, None) + # error propagation model required + + # document if reporting as percent or fractional values + composition_table["percent"] = True + + return composition_table diff --git a/pynxtools/dataconverter/readers/ellips/reader.py b/pynxtools/dataconverter/readers/ellips/reader.py index 58a921c2e..bd7c8bf19 100644 --- a/pynxtools/dataconverter/readers/ellips/reader.py +++ b/pynxtools/dataconverter/readers/ellips/reader.py @@ -19,14 +19,15 @@ import os from typing import Tuple, Any import math +from importlib.metadata import version import yaml import pandas as pd import numpy as np -# import h5py from pynxtools.dataconverter.readers.base.reader import BaseReader from pynxtools.dataconverter.readers.ellips.mock import MockEllips from pynxtools.dataconverter.helpers import extract_atom_types from pynxtools.dataconverter.readers.utils import flatten_and_replace, FlattenSettings +from pynxtools import get_nexus_version, get_nexus_version_hash DEFAULT_HEADER = {'sep': '\t', 'skip': 0} @@ -373,7 +374,7 @@ def write_scan_axis(name: str, values: list, units: str): header["Instrument/angle_of_incidence"] = unique_angles for axis in ["detection_angle", "incident_angle"]: - write_scan_axis(axis, unique_angles, "degrees") + write_scan_axis(axis, unique_angles, "degree") # Create mocked ellipsometry data template: if is_mock: @@ -416,7 +417,15 @@ def read(self, template = populate_template_dict(header, template) spectrum_type = header["Data"]["spectrum_type"] - spectrum_unit = header["Data"]["spectrum_unit"] + if header["Data"]["spectrum_unit"] == "Angstroms": + spectrum_unit = "angstrom" + else: + spectrum_unit = header["Data"]["spectrum_unit"] + # MK:: Carola, Ron, Flo, Tamas, Sandor refactor the above-mentioned construct + # there has to be a unit parsing control logic already at the level of this reader + # because test-data.data has improper units like Angstroms or degrees + # the fix above prevents that these incorrect units are get just blindly carried + # over into the nxs file and thus causing nomas to fail template[f"/ENTRY[entry]/plot/AXISNAME[{spectrum_type}]"] = \ {"link": f"/entry/data_collection/{spectrum_type}_spectrum"} template[f"/ENTRY[entry]/data_collection/NAME_spectrum[{spectrum_type}_spectrum]/@units"] \ @@ -432,16 +441,19 @@ def read(self, "link": "/entry/data_collection/measured_data", "shape": np.index_exp[index, dindx, :] } - template[f"/ENTRY[entry]/plot/DATA[{key}]/@units"] = "degrees" + # MK:: Carola, Ron, Flo, Tamas, Sandor refactor the following line + # using a proper unit parsing logic + template[f"/ENTRY[entry]/plot/DATA[{key}]/@units"] = "degree" if dindx == 0 and index == 0: template[f"/ENTRY[entry]/plot/DATA[{key}]/@long_name"] = \ - f"{plot_name} (degrees)" + f"{plot_name} (degree)" template[f"/ENTRY[entry]/plot/DATA[{key}_errors]"] = \ { "link": "/entry/data_collection/data_error", "shape": np.index_exp[index, dindx, :] } - template[f"/ENTRY[entry]/plot/DATA[{key}_errors]/@units"] = "degrees" + # MK:: Carola, Ron, Flo, Tamas, Sandor refactor the following line + template[f"/ENTRY[entry]/plot/DATA[{key}_errors]/@units"] = "degree" # Define default plot showing Psi and Delta at all angles: template["/@default"] = "entry" @@ -455,6 +467,16 @@ def read(self, for index in range(1, len(data_list)): template["/ENTRY[entry]/plot/@auxiliary_signals"] += data_list[index] + template["/ENTRY[entry]/definition"] = "NXellipsometry" + 
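+ # document the application definition, its version and commit, plus the converter program, for provenance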
template["/ENTRY[entry]/definition/@url"] = ( + "https://github.com/FAIRmat-NFDI/nexus_definitions/" + f"blob/{get_nexus_version_hash()}/contributed_definitions/NXellipsometry.nxdl.xml" + ) + template["/ENTRY[entry]/definition/@version"] = get_nexus_version() + template["/ENTRY[entry]/program_name"] = "pynxtools" + template["/ENTRY[entry]/program_name/@version"] = version("pynxtools") + template["/ENTRY[entry]/program_name/@url"] = "https://github.com/FAIRmat-NFDI/pynxtools" + return template diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/README.md b/pynxtools/dataconverter/readers/em_nion/map_concepts/README.md similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/README.md rename to pynxtools/dataconverter/readers/em_nion/map_concepts/README.md diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/swift_display_items_to_nx_concepts.py b/pynxtools/dataconverter/readers/em_nion/map_concepts/swift_display_items_to_nx.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/swift_display_items_to_nx_concepts.py rename to pynxtools/dataconverter/readers/em_nion/map_concepts/swift_display_items_to_nx.py diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/generic_eln_mapping.py b/pynxtools/dataconverter/readers/em_nion/map_concepts/swift_eln_to_nx_map.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/generic_eln_mapping.py rename to pynxtools/dataconverter/readers/em_nion/map_concepts/swift_eln_to_nx_map.py diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_ang_space.py b/pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_image_ang_space.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/nx_image_ang_space.py rename to pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_image_ang_space.py diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_real_space.py b/pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_image_real_space.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/nx_image_real_space.py rename to pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_image_real_space.py diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/nx_spectrum_eels.py b/pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_spectrum_eels.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/nx_spectrum_eels.py rename to pynxtools/dataconverter/readers/em_nion/map_concepts/swift_to_nx_spectrum_eels.py diff --git a/pynxtools/dataconverter/readers/em_nion/reader.py b/pynxtools/dataconverter/readers/em_nion/reader.py index ac785fda3..e226aca91 100644 --- a/pynxtools/dataconverter/readers/em_nion/reader.py +++ b/pynxtools/dataconverter/readers/em_nion/reader.py @@ -23,10 +23,10 @@ from pynxtools.dataconverter.readers.base.reader import BaseReader -from pynxtools.dataconverter.readers.em_nion.utils.use_case_selector \ +from pynxtools.dataconverter.readers.em_nion.utils.swift_define_io_cases \ import EmNionUseCaseSelector -from pynxtools.dataconverter.readers.em_nion.utils.em_generic_eln_io \ +from pynxtools.dataconverter.readers.em_nion.utils.swift_load_generic_eln \ import NxEmNionElnSchemaParser from pynxtools.dataconverter.readers.em_nion.utils.swift_zipped_project_parser \ diff --git a/pynxtools/dataconverter/readers/em_nion/utils/versioning.py 
b/pynxtools/dataconverter/readers/em_nion/utils/em_nion_versioning.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/utils/versioning.py rename to pynxtools/dataconverter/readers/em_nion/utils/em_nion_versioning.py diff --git a/pynxtools/dataconverter/readers/em_nion/utils/use_case_selector.py b/pynxtools/dataconverter/readers/em_nion/utils/swift_define_io_cases.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/utils/use_case_selector.py rename to pynxtools/dataconverter/readers/em_nion/utils/swift_define_io_cases.py diff --git a/pynxtools/dataconverter/readers/em_nion/utils/swift_dimscale_axes.py b/pynxtools/dataconverter/readers/em_nion/utils/swift_generate_dimscale_axes.py similarity index 96% rename from pynxtools/dataconverter/readers/em_nion/utils/swift_dimscale_axes.py rename to pynxtools/dataconverter/readers/em_nion/utils/swift_generate_dimscale_axes.py index cdc15e895..fbd9cfcf2 100644 --- a/pynxtools/dataconverter/readers/em_nion/utils/swift_dimscale_axes.py +++ b/pynxtools/dataconverter/readers/em_nion/utils/swift_generate_dimscale_axes.py @@ -23,7 +23,7 @@ import numpy as np -from pynxtools.dataconverter.readers.em_nion.concepts.swift_display_items_to_nx_concepts \ +from pynxtools.dataconverter.readers.em_nion.map_concepts.swift_display_items_to_nx \ import metadata_constraints, check_existence_of_required_fields # nexus_concept_dict diff --git a/pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py b/pynxtools/dataconverter/readers/em_nion/utils/swift_load_generic_eln.py similarity index 95% rename from pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py rename to pynxtools/dataconverter/readers/em_nion/utils/swift_load_generic_eln.py index 8be648477..4028e4986 100644 --- a/pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py +++ b/pynxtools/dataconverter/readers/em_nion/utils/swift_load_generic_eln.py @@ -27,16 +27,16 @@ from ase.data import chemical_symbols -from pynxtools.dataconverter.readers.em_nion.utils.versioning \ +from pynxtools.dataconverter.readers.em_nion.utils.em_nion_versioning \ import NX_EM_NION_ADEF_NAME, NX_EM_NION_ADEF_VERSION -from pynxtools.dataconverter.readers.em_nion.utils.versioning \ +from pynxtools.dataconverter.readers.em_nion.utils.em_nion_versioning \ import NX_EM_NION_EXEC_NAME, NX_EM_NION_EXEC_VERSION -from pynxtools.dataconverter.readers.em_nion.concepts.swift_handle_nx_concepts \ +from pynxtools.dataconverter.readers.shared.map_concepts.mapping_functors \ import apply_modifier, variadic_path_to_specific_path -from pynxtools.dataconverter.readers.em_nion.concepts.generic_eln_mapping \ +from pynxtools.dataconverter.readers.em_nion.map_concepts.swift_eln_to_nx_map \ import NxEmElnInput, NxUserFromListOfDict, NxDetectorListOfDict, NxSample diff --git a/pynxtools/dataconverter/readers/em_nion/utils/swift_zipped_project_parser.py b/pynxtools/dataconverter/readers/em_nion/utils/swift_zipped_project_parser.py index f72f7d48c..17f74ba61 100644 --- a/pynxtools/dataconverter/readers/em_nion/utils/swift_zipped_project_parser.py +++ b/pynxtools/dataconverter/readers/em_nion/utils/swift_zipped_project_parser.py @@ -38,21 +38,21 @@ from pynxtools.dataconverter.readers.em_nion.utils.swift_uuid_to_file_name \ import uuid_to_file_name -from pynxtools.dataconverter.readers.em_nion.utils.swift_dimscale_axes \ +from pynxtools.dataconverter.readers.em_nion.utils.swift_generate_dimscale_axes \ import get_list_of_dimension_scale_axes -from 
pynxtools.dataconverter.readers.em_nion.concepts.swift_display_items_to_nx_concepts \ +from pynxtools.dataconverter.readers.em_nion.map_concepts.swift_display_items_to_nx \ import nexus_concept_dict, identify_nexus_concept_key -from pynxtools.dataconverter.readers.em_nion.concepts.swift_handle_nx_concepts \ +from pynxtools.dataconverter.readers.shared.map_concepts.mapping_functors \ import apply_modifier, variadic_path_to_specific_path -from pynxtools.dataconverter.readers.em_nion.concepts.nx_image_real_space \ +from pynxtools.dataconverter.readers.em_nion.map_concepts.swift_to_nx_image_real_space \ import NxImageRealSpaceDict -from pynxtools.dataconverter.readers.em_nion.utils.versioning \ +from pynxtools.dataconverter.readers.em_nion.utils.em_nion_versioning \ import NX_EM_NION_SWIFT_NAME, NX_EM_NION_SWIFT_VERSION -from pynxtools.dataconverter.readers.em_nion.utils.versioning \ +from pynxtools.dataconverter.readers.em_nion.utils.em_nion_versioning \ import NX_EM_NION_EXEC_NAME, NX_EM_NION_EXEC_VERSION diff --git a/pynxtools/dataconverter/readers/em_om/utils/image_transform.py b/pynxtools/dataconverter/readers/em_om/utils/image_transform.py index 34f98266f..7369ebef8 100644 --- a/pynxtools/dataconverter/readers/em_om/utils/image_transform.py +++ b/pynxtools/dataconverter/readers/em_om/utils/image_transform.py @@ -23,7 +23,6 @@ # f" how-do-i-make-pil-take-into-account-the-shortest-side-when-creating-a-thumbnail" import numpy as np -from PIL import Image as pil def thumbnail(img, size=300): @@ -39,16 +38,14 @@ def thumbnail(img, size=300): return img if old_width == old_height: - img.thumbnail((size, size), pil.ANTIALIAS) - + img.thumbnail((size, size)) elif old_height > old_width: ratio = float(old_width) / float(old_height) new_width = ratio * size - img = img.resize((int(np.floor(new_width)), size), pil.ANTIALIAS) - + img = img.resize((int(np.floor(new_width)), size)) elif old_width > old_height: ratio = float(old_height) / float(old_width) new_height = ratio * size - img = img.resize((size, int(np.floor(new_height))), pil.ANTIALIAS) + img = img.resize((size, int(np.floor(new_height)))) return img diff --git a/pynxtools/dataconverter/readers/example/reader.py b/pynxtools/dataconverter/readers/example/reader.py index 81b31b6de..83e7438b0 100644 --- a/pynxtools/dataconverter/readers/example/reader.py +++ b/pynxtools/dataconverter/readers/example/reader.py @@ -52,7 +52,8 @@ def read(self, for k in template.keys(): # The entries in the template dict should correspond with what the dataconverter # outputs with --generate-template for a provided NXDL file - if k.startswith("/ENTRY[entry]/required_group"): + if k.startswith("/ENTRY[entry]/required_group") \ + or k == "/ENTRY[entry]/optional_parent/req_group_in_opt_group": continue field_name = k[k.rfind("/") + 1:] @@ -61,6 +62,10 @@ def read(self, if f"{field_name}_units" in data.keys() and f"{k}/@units" in template.keys(): template[f"{k}/@units"] = data[f"{field_name}_units"] + template["required"]["/ENTRY[entry]/optional_parent/required_child"] = 1 + template["optional"][("/ENTRY[entry]/optional_parent/" + "req_group_in_opt_group/DATA[data]")] = [0, 1] + # Add non template key template["/ENTRY[entry]/does/not/exist"] = "None" template["/ENTRY[entry]/required_group/description"] = "A test description" diff --git a/pynxtools/dataconverter/readers/json_map/README.md b/pynxtools/dataconverter/readers/json_map/README.md index 4b4820c49..b81aec969 100644 --- a/pynxtools/dataconverter/readers/json_map/README.md +++ 
b/pynxtools/dataconverter/readers/json_map/README.md @@ -1,24 +1,63 @@ # JSON Map Reader -This reader allows you to convert either data from a .json file or an xarray exported as a .pickle using a flat .mapping.json file. +## What is this reader? + +This reader is designed to allow users of pynxtools to convert their existing data with the help of a map file. The map file tells the reader what to pick from your data files and convert them to FAIR NeXus files. The following formats are supported as input files: +* HDF5 (any extension works i.e. h5, hdf5, nxs, etc) +* JSON +* Python Dict Objects Pickled with [pickle](https://docs.python.org/3/library/pickle.html). These can contain [xarray.DataArray](https://docs.xarray.dev/en/stable/generated/xarray.DataArray.html) objects as well as regular Python types and Numpy types. It accepts any NXDL file that you like as long as your mapping file contains all the fields. Please use the --generate-template function of the dataconverter to create a .mapping.json file. ```console -user@box:~$ python convert.py --nxdl NXmynxdl --generate-template > mynxdl.mapping.json +user@box:~$ dataconverter --nxdl NXmynxdl --generate-template > mynxdl.mapping.json ``` There are some example files you can use: +[data.mapping.json](/tests/data/dataconverter/readers/json_map/data.mapping.json) -[data.mapping.json](/tests/data/tools/dataconverter/readers/json_map/data.mapping.json) - -[data.json](/tests/data/tools/dataconverter/readers/json_map/data.json) +[data.json](/tests/data/dataconverter/readers/json_map/data.json) ```console -user@box:~$ python convert.py --nxdl NXtest --input-file data.json --input-file data.mapping.json --reader json_map +user@box:~$ dataconverter --nxdl NXtest --input-file data.json --mapping data.mapping.json +``` + +##### [Example](/examples/json_map/) with HDF5 files. + +## The mapping.json file + +This file is designed to let you fill in the requirements of a NeXus Application Definition without writing any code. If you already have data in the formats listed above, you just need to use this mapping file to help the dataconverter pick your data correctly. + +The mapping files will always be based on the Template the dataconverter generates. See above on how to generate a mapping file. +The right hand side values of the Template keys are what you can modify. + +Here are the three different ways you can fill the right hand side of the Template keys: +* Write the nested path in your datafile. This is indicated by a leading `/` before the word `entry` to make `/entry/data/current_295C` below. +Example: + +```json + "/ENTRY[entry]/DATA[data]/current_295C": "/entry/data/current_295C", + "/ENTRY[entry]/NXODD_name/posint_value": "/a_level_down/another_level_down/posint_value", +``` + +* Write the values directly in the mapping file for missing data from your data file. + +```json + + "/ENTRY[entry]/PROCESS[process]/program": "Bluesky", + "/ENTRY[entry]/PROCESS[process]/program/@version": "1.6.7" +``` + +* Write JSON objects with a link key. This follows the same link mechanism that the dataconverter implements. In the context of this reader, you can only use external links to your data files. In the example below, `current.nxs` is an already existing HDF5 file that we link to in our new NeXus file without copying over the data. The format is as follows: +`"link": ":"` +Note: This only works for HDF5 files currently. 
+ +```json + "/ENTRY[entry]/DATA[data]/current_295C": {"link": "current.nxs:/entry/data/current_295C"}, + "/ENTRY[entry]/DATA[data]/current_300C": {"link": "current.nxs:/entry/data/current_300C"}, ``` ## Contact person in FAIRmat for this reader -Sherjeel Shabih \ No newline at end of file +Sherjeel Shabih diff --git a/pynxtools/dataconverter/readers/json_map/reader.py b/pynxtools/dataconverter/readers/json_map/reader.py index 25123dc94..d17bb075b 100644 --- a/pynxtools/dataconverter/readers/json_map/reader.py +++ b/pynxtools/dataconverter/readers/json_map/reader.py @@ -21,10 +21,10 @@ import pickle import numpy as np import xarray +from mergedeep import merge from pynxtools.dataconverter.readers.base.reader import BaseReader from pynxtools.dataconverter.template import Template -from pynxtools.dataconverter.helpers import ensure_all_required_fields_exist from pynxtools.dataconverter import hdfdict @@ -58,9 +58,26 @@ def get_val_nested_keystring_from_dict(keystring, data): return data[current_key] +def get_attrib_nested_keystring_from_dict(keystring, data): + """ + Fetches all attributes from the data dict using path strings without a leading '/': + 'path/to/data/in/dict' + """ + if isinstance(keystring, (list, dict)): + return keystring + + key_splits = keystring.split("/") + parents = key_splits[:-1] + target = key_splits[-1] + for key in parents: + data = data[key] + + return data[target + "@"] if target + "@" in data.keys() else None + + def is_path(keystring): """Checks whether a given value in the mapping is a mapping path or just data""" - return isinstance(keystring, str) and keystring[0] == "/" + return isinstance(keystring, str) and len(keystring) > 0 and keystring[0] == "/" def fill_undocumented(mapping, template, data): @@ -69,6 +86,7 @@ def fill_undocumented(mapping, template, data): if is_path(value): template["undocumented"][path] = get_val_nested_keystring_from_dict(value[1:], data) + fill_attributes(path, value[1:], data, template) else: template["undocumented"][path] = value @@ -82,6 +100,7 @@ def fill_documented(template, mapping, template_provided, data): if is_path(map_str): template[path] = get_val_nested_keystring_from_dict(map_str[1:], data) + fill_attributes(path, map_str[1:], data, template) else: template[path] = map_str @@ -90,6 +109,14 @@ def fill_documented(template, mapping, template_provided, data): pass +def fill_attributes(path, map_str, data, template): + """Fills in the template all attributes found in the data object""" + attribs = get_attrib_nested_keystring_from_dict(map_str, data) + if attribs: + for key, value in attribs.items(): + template[path + "/@" + key] = value + + def convert_shapes_to_slice_objects(mapping): """Converts shape slice strings to slice objects for indexing""" for key in mapping: @@ -98,6 +125,25 @@ def convert_shapes_to_slice_objects(mapping): mapping[key]["shape"] = parse_slice(mapping[key]["shape"]) +def get_map_from_partials(partials, template, data): + """Takes a list of partials and returns a mapping dictionary to fill partials in our template""" + mapping: dict = {} + for partial in partials: + path = "" + template_path = "" + for part in partial.split("/")[1:]: + path = path + "/" + part + attribs = get_attrib_nested_keystring_from_dict(path[1:], data) + if template_path + "/" + part in template.keys(): + template_path = template_path + "/" + part + else: + nx_name = f"{attribs['NX_class'][2:].upper()}[{part}]" if attribs and "NX_class" in attribs else part # pylint: disable=line-too-long + template_path = template_path + 
"/" + nx_name + mapping[template_path] = path + + return mapping + + class JsonMapReader(BaseReader): """A reader that takes a mapping json file and a data file/object to return a template.""" @@ -119,10 +165,10 @@ def read(self, The mapping is only accepted as file.mapping.json to the inputs. """ data: dict = {} - mapping: dict = {} + mapping: dict = None + partials: list = [] - if objects: - data = objects[0] + data = objects[0] if objects else data for file_path in file_paths: file_extension = file_path[file_path.rindex("."):] @@ -143,23 +189,26 @@ def read(self, if is_hdf5: hdf = hdfdict.load(file_path) hdf.unlazy() - data = dict(hdf) + merge(data, dict(hdf)) + if "entry@" in data and "partial" in data["entry@"]: + partials.extend(data["entry@"]["partial"]) if mapping is None: - template = Template({x: "/hierarchical/path/in/your/datafile" for x in template}) - raise IOError("Please supply a JSON mapping file: --input-file" - " my_nxdl_map.mapping.json\n\n You can use this " - "template for the required fields: \n" + str(template)) + if len(partials) > 0: + mapping = get_map_from_partials(partials, template, data) + else: + template = Template({x: "/hierarchical/path/in/your/datafile" for x in template}) + raise IOError("Please supply a JSON mapping file: --input-file" + " my_nxdl_map.mapping.json\n\n You can use this " + "template for the required fields: \n" + str(template)) + new_template = Template() convert_shapes_to_slice_objects(mapping) - new_template = Template() fill_documented(new_template, mapping, template, data) fill_undocumented(mapping, new_template, data) - ensure_all_required_fields_exist(template, new_template) - return new_template diff --git a/pynxtools/dataconverter/readers/mpes/reader.py b/pynxtools/dataconverter/readers/mpes/reader.py index fce988f76..7d860765c 100644 --- a/pynxtools/dataconverter/readers/mpes/reader.py +++ b/pynxtools/dataconverter/readers/mpes/reader.py @@ -198,20 +198,6 @@ def handle_h5_and_json_file(file_paths, objects): f"but {file_path} does not match.", ) - if not os.path.exists(file_path): - file_path = os.path.join( - os.path.dirname(__file__), - "..", - "..", - "..", - "..", - "tests", - "data", - "dataconverter", - "readers", - "mpes", - file_path, - ) if not os.path.exists(file_path): raise FileNotFoundError( errno.ENOENT, @@ -252,11 +238,30 @@ def _getattr(obj, attr): if "index" in attr: axis = attr.split(".")[0] - return str(obj.dims.index(f"{axis}")) + return obj.dims.index(f"{axis}") return reduce(_getattr, [obj] + attr.split(".")) +def fill_data_indices_in_config(config_file_dict, x_array_loaded): + """Add data indices key value pairs to the config_file + dictionary from the xarray dimensions if not already + present. 
+ """ + for key in list(config_file_dict): + if "*" in key: + value = config_file_dict[key] + for dim in x_array_loaded.dims: + new_key = key.replace("*", dim) + new_value = value.replace("*", dim) + + if new_key not in config_file_dict.keys() \ + and new_value not in config_file_dict.values(): + config_file_dict[new_key] = new_value + + config_file_dict.pop(key) + + class MPESReader(BaseReader): """MPES-specific reader class""" @@ -265,7 +270,7 @@ class MPESReader(BaseReader): # Whitelist for the NXDLs that the reader supports and can process supported_nxdls = ["NXmpes"] - def read( + def read( # pylint: disable=too-many-branches self, template: dict = None, file_paths: Tuple[str] = None, @@ -283,6 +288,8 @@ def read( eln_data_dict, ) = handle_h5_and_json_file(file_paths, objects) + fill_data_indices_in_config(config_file_dict, x_array_loaded) + for key, value in config_file_dict.items(): if isinstance(value, str) and ":" in value: diff --git a/pynxtools/dataconverter/readers/rii_database/reader.py b/pynxtools/dataconverter/readers/rii_database/reader.py index ae36b3884..32fb7c5fa 100644 --- a/pynxtools/dataconverter/readers/rii_database/reader.py +++ b/pynxtools/dataconverter/readers/rii_database/reader.py @@ -17,13 +17,12 @@ # """Convert refractiveindex.info yaml files to nexus""" from typing import Tuple, Any, Dict -import logging from pynxtools.dataconverter.readers.json_yml.reader import YamlJsonReader from pynxtools.dataconverter.readers.rii_database.dispersion_reader import ( DispersionReader, ) -from pynxtools.dataconverter.readers.utils import parse_json +from pynxtools.dataconverter.readers.utils import parse_json, handle_objects class RiiReader(YamlJsonReader): @@ -40,7 +39,7 @@ def __init__(self, *args, **kwargs): ".yaml": self.read_dispersion, ".json": self.parse_json_w_fileinfo, "default": lambda _: self.appdef_defaults(), - "objects": self.handle_objects, + "objects": self.handle_rii_objects, } def read_dispersion(self, filename: str): @@ -86,20 +85,9 @@ def parse_json_w_fileinfo(self, filename: str) -> Dict[str, Any]: return template - def handle_objects(self, objects: Tuple[Any]) -> Dict[str, Any]: + def handle_rii_objects(self, objects: Tuple[Any]) -> Dict[str, Any]: """Handle objects and generate template entries from them""" - if objects is None: - return {} - - template = {} - - for obj in objects: - if not isinstance(obj, dict): - logging.warning("Ignoring unknown object of type %s", type(obj)) - continue - - template.update(obj) - + template = handle_objects(objects) self.fill_dispersion_in(template) return template diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/swift_handle_nx_concepts.py b/pynxtools/dataconverter/readers/shared/map_concepts/mapping_functors.py similarity index 100% rename from pynxtools/dataconverter/readers/em_nion/concepts/swift_handle_nx_concepts.py rename to pynxtools/dataconverter/readers/shared/map_concepts/mapping_functors.py diff --git a/pynxtools/dataconverter/readers/shared/shared_utils.py b/pynxtools/dataconverter/readers/shared/shared_utils.py index 59d28ba6d..629e29a0f 100644 --- a/pynxtools/dataconverter/readers/shared/shared_utils.py +++ b/pynxtools/dataconverter/readers/shared/shared_utils.py @@ -22,15 +22,17 @@ # pylint: disable=E1101, R0801 -import git +# import git def get_repo_last_commit() -> str: """Identify the last commit to the repository.""" - repo = git.Repo(search_parent_directories=True) - sha = str(repo.head.object.hexsha) - if sha != "": - return sha + # repo = git.Repo(search_parent_directories=True) 
+ # sha = str(repo.head.object.hexsha) + # if sha != "": + # return sha + # currently update-north-markus branch on nomad-FAIR does not pick up + # git even though git is in the base image and gitpython is in the pynxtools deps return "unknown git commit id or unable to parse git reverse head" diff --git a/pynxtools/dataconverter/readers/transmission/reader.py b/pynxtools/dataconverter/readers/transmission/reader.py index 3d4f0e152..ccc94374e 100644 --- a/pynxtools/dataconverter/readers/transmission/reader.py +++ b/pynxtools/dataconverter/readers/transmission/reader.py @@ -22,7 +22,7 @@ from pynxtools.dataconverter.readers.json_yml.reader import YamlJsonReader import pynxtools.dataconverter.readers.transmission.metadata_parsers as mpars -from pynxtools.dataconverter.readers.utils import parse_json, parse_yml +from pynxtools.dataconverter.readers.utils import parse_json, parse_yml, handle_objects # Dictionary mapping metadata in the asc file to the paths in the NeXus file. @@ -254,6 +254,7 @@ class TransmissionReader(YamlJsonReader): ".yml": lambda fname: parse_yml(fname, CONVERT_DICT, REPLACE_NESTED), ".yaml": lambda fname: parse_yml(fname, CONVERT_DICT, REPLACE_NESTED), "default": lambda _: add_def_info(), + "objects": handle_objects, } diff --git a/pynxtools/dataconverter/readers/utils.py b/pynxtools/dataconverter/readers/utils.py index 23fbfbdd9..c1826d744 100644 --- a/pynxtools/dataconverter/readers/utils.py +++ b/pynxtools/dataconverter/readers/utils.py @@ -16,12 +16,15 @@ # limitations under the License. # """Utility functions for the NeXus reader classes.""" +import logging from dataclasses import dataclass, replace -from typing import List, Any, Dict, Optional +from typing import List, Any, Dict, Optional, Tuple from collections.abc import Mapping import json import yaml +logger = logging.getLogger(__name__) + @dataclass class FlattenSettings(): @@ -201,3 +204,20 @@ def parse_json(file_path: str) -> Dict[str, Any]: """ with open(file_path, "r", encoding="utf-8") as file: return json.load(file) + + +def handle_objects(objects: Tuple[Any]) -> Dict[str, Any]: + """Handle objects and generate template entries from them""" + if objects is None: + return {} + + template = {} + + for obj in objects: + if not isinstance(obj, dict): + logger.warning("Ignoring unknown object of type %s", type(obj)) + continue + + template.update(obj) + + return template diff --git a/pynxtools/dataconverter/readers/xrd/README.md b/pynxtools/dataconverter/readers/xrd/README.md new file mode 100644 index 000000000..53c64dfc7 --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/README.md @@ -0,0 +1,40 @@ +# XRD Reader +With the XRD reader, data from an X-ray diffraction experiment can be read and written into a NeXus file (an HDF5 file with the extension .nxs) according to the NXxrd_pan application definition in [NeXus](https://github.com/FAIRmat-NFDI/nexus_definitions). There are a few different methods of measuring XRD: 1. θ:2θ instruments (e.g. Rigaku H3R), and 2. θ:θ instruments (e.g. PANalytical X’Pert Pro). The goal of this reader is to support both of these methods. + +**NOTE: This reader is still under development. As of now, the reader can only handle files with the extension `.xrdml`, obtained with PANalytical X’Pert Pro version 1.5 (method 2 described above). Currently we are working to include more file types and file versions.** + +## Contact Person in FAIRmat +In principle, you can reach out to any member of Area B of the FAIRmat consortium, but Rubel Mozumder is the most suitable contact for a quick response.
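+For a quick look at what the reader extracts from an `.xrdml` file, the `XRDMLParser` class described in the Parsers section below can be used directly from Python. A minimal sketch (the file name is a placeholder):
+
+```python
+from pynxtools.dataconverter.readers.xrd.xrd_parser import XRDMLParser
+
+parser = XRDMLParser("my_scan.xrdml")  # placeholder path to a PANalytical .xrdml file
+flat_dict = parser.get_slash_separated_xrd_dict()  # flat dict with slash-separated keys
+for key, value in list(flat_dict.items())[:10]:  # inspect the first few entries
+    print(key, "->", value)
+```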
+ +## Parsers +In computer science, a parser is a program that splits its input into smaller parts (tokens) and relates these tokens to each other in a tree structure; this is what helps a compiler understand the structure of source code. + +Here, the XRD reader calls a program or class (the parser) that reads the experimental input file and re-organises the different physical/experimental concepts or properties into a structure defined by the developer. + +### class pynxtools.dataconverter.readers.xrd.xrd_parser.XRDMLParser + + **inputs:** + file_path: Full path of the input file. + + **Important method:** + get_slash_separated_xrd_dict() -> dict + + This method can be used to check whether all the data from the input file have been read; it returns the slash-separated dict as described. + + +### Other Parsers + **Coming soon!** + +### How To +The reader can be run from a Jupyter notebook or JupyterLab with the following command: + +```sh + ! dataconverter \ +--reader xrd \ +--nxdl NXxrd_pan \ +--input-file $ \ +--input-file $ \ +--output .nxs +``` + +An example file can be found in GitLab in [nomad-remote-tools-hub](https://gitlab.mpcdf.mpg.de/nomad-lab/nomad-remote-tools-hub/-/tree/develop/docker/xrd); feel free to visit it and try out the reader. diff --git a/pynxtools/dataconverter/readers/xrd/__init__.py b/pynxtools/dataconverter/readers/xrd/__init__.py new file mode 100644 index 000000000..d4ec4a8cc --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/__init__.py @@ -0,0 +1,15 @@ +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/pynxtools/dataconverter/readers/xrd/config.py b/pynxtools/dataconverter/readers/xrd/config.py new file mode 100644 index 000000000..4d3757b10 --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/config.py @@ -0,0 +1,117 @@ +"""Config file that maps NeXus concepts to data paths in the raw file.""" + +# pylint: disable=C0301 +xrdml = { + "/ENTRY[entry]/2theta_plot/chi": {"xrdml_1.5": {"value": "", + "@units": "", + "@chi_indices": 0}, + }, + "/ENTRY[entry]/2theta_plot/intensity": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/intensities", + "@units": "counts/s"} + }, + "/ENTRY[entry]/2theta_plot/omega": {"xrdml_1.5": {"value": "", + "@units": "", + "@omega_indices": 1}, + }, + "/ENTRY[entry]/2theta_plot/title": "Intensity Vs.
Two Theta (deg.)", + "/ENTRY[entry]/2theta_plot/phi": {"xrdml_1.5": {"value": "", + "@units": "", + "@phi_indices": 0}, + }, + "/ENTRY[entry]/2theta_plot/two_theta": {"xrdml_1.5": {"value": "", + "@units": "deg", + "@two_theta_indices": 0}, + }, + "/ENTRY[entry]/COLLECTION[collection]/beam_attenuation_factors": {"xrdml_1.5": {"value": "/beamAttenuationFactors", + "@units": ""}, + }, + "/ENTRY[entry]/COLLECTION[collection]/omega/start": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_2/startPosition", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_2/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/omega/end": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_2/endPosition", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_2/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/omega/step": {"xrdml_1.5": {"value": "/xrdMeasurements/comment/entry_2/MinimumstepsizeOmega", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_2/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/2theta/start": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_1/startPosition", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_1/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/2theta/end": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_1/endPosition", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_1/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/2theta/step": {"xrdml_1.5": {"value": "/xrdMeasurements/comment/entry_2/Minimumstepsize2Theta", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/positions_1/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/count_time": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/commonCountingTime", + "@units": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/commonCountingTime/unit"}, + }, + "/ENTRY[entry]/COLLECTION[collection]/data_file": {"xrdml_1.5": {"value": ""} + }, + "/ENTRY[entry]/COLLECTION[collection]/goniometer_x": {"xrdml_1.5": {"value": "/X", + "@units": ""}, + }, + "/ENTRY[entry]/COLLECTION[collection]/goniometer_y": {"xrdml_1.5": {"value": "/Y", + "@units": ""}, + }, + "/ENTRY[entry]/COLLECTION[collection]/goniometer_z": {"xrdml_1.5": {"value": "/Z", + "@units": ""}, + }, + "/ENTRY[entry]/COLLECTION[collection]/measurement_type": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/measurementType", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/integration_time": {"xrdml_1.5": {"value": "", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/integration_time/@units": {"xrdml_1.5": {"value": "", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/scan_axis": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/scanAxis", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/scan_mode": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/mode", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_one": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/usedWavelength/kAlpha1", + "@units": "/xrdMeasurements/xrdMeasurement/usedWavelength/kAlpha1/unit"}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two": {"xrdml_1.5": {"value": 
"/xrdMeasurements/xrdMeasurement/usedWavelength/kAlpha2", + "@units": "/xrdMeasurements/xrdMeasurement/usedWavelength/kAlpha2/unit"}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/kbeta": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/usedWavelength/kBeta", + "@units": "/xrdMeasurements/xrdMeasurement/usedWavelength/kBeta/unit"}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/ratio_k_alphatwo_k_alphaone": {"xrdml_1.5": {"value": "", + "@units": ""} + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_current": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/incidentBeamPath/xRayTube/current", + "@units": "/xrdMeasurements/xrdMeasurement/incidentBeamPath/xRayTube/current/unit"} + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/source_peak_wavelength": {"xrdml_1.5": {"value": "", + "@units": ""} + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_material": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/incidentBeamPath/xRayTube/anodeMaterial", + "@units": ""}, + }, + "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_voltage": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/incidentBeamPath/xRayTube/tension", + "@units": "/xrdMeasurements/xrdMeasurement/incidentBeamPath/xRayTube/tension/unit"} + }, + "/ENTRY[entry]/SAMPLE[sample]/prepared_by": {"xrdml_1.5": {"value": ""} + }, + "/ENTRY[entry]/SAMPLE[sample]/sample_id": {"xrdml_1.5": {"value": ""}, + }, + "/ENTRY[entry]/SAMPLE[sample]/sample_mode": {"xrdml_1.5": {"value": ""}, + }, + "/ENTRY[entry]/SAMPLE[sample]/sample_name": {"xrdml_1.5": {"value": ""}, + }, + "/ENTRY[entry]/definition": "NXxrd_pan", + "/ENTRY[entry]/method": "X-Ray Diffraction (XRD)", + "/ENTRY[entry]/q_plot/intensity": {"xrdml_1.5": {"value": "/xrdMeasurements/xrdMeasurement/scan/dataPoints/intensities", + "@units": "counts/s"}, + }, + "/ENTRY[entry]/q_plot/q": {"xrdml_1.5": {"value": "", + "@units": ""}, + }, + "/@default": "entry", + "/ENTRY[entry]/@default": "2theta_plot", +} diff --git a/pynxtools/dataconverter/readers/xrd/reader.py b/pynxtools/dataconverter/readers/xrd/reader.py new file mode 100644 index 000000000..242498790 --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/reader.py @@ -0,0 +1,176 @@ +"""XRD reader.""" +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from typing import Tuple, Any, Dict, Union +import json +from pathlib import Path +import xml.etree.ElementTree as ET + +import yaml + +from pynxtools.dataconverter.helpers import (generate_template_from_nxdl, + validate_data_dict) +from pynxtools.dataconverter.template import Template +from pynxtools.dataconverter.readers.xrd.xrd_parser import parse_and_fill_template +from pynxtools.dataconverter.readers.utils import flatten_and_replace, FlattenSettings +from pynxtools.dataconverter.readers.base.reader import BaseReader + +CONVERT_DICT: Dict[str, str] = { + 'unit': '@units', + 'Instrument': 'INSTRUMENT[instrument]', + 'Source': 'SOURCE[source]', + 'Detector': 'DETECTOR[detector]', + 'Collection': 'COLLECTION[collection]', + 'Sample': 'SAMPLE[sample]', + 'version': '@version', + 'User': 'USER[user]', +} + + +# Global var to collect the root from get_template_from_nxdl_name() +# and use it in the the the varidate_data_dict() +ROOT: ET.Element = None +REPLACE_NESTED: Dict[str, Any] = {} +XRD_FILE_EXTENSIONS = [".xrdml", "xrdml", ".udf", ".raw", ".xye"] + + +def get_template_from_nxdl_name(nxdl_name): + """Generate template from nxdl name. + + Example of nxdl name could be NXxrd_pan. + Parameters + ---------- + nxdl_name : str + Name of nxdl file e.g. NXmpes + + Returns + ------- + Template + Empty template. + + Raises + ------ + ValueError + Error if nxdl file is not found. + """ + nxdl_file = nxdl_name + ".nxdl.xml" + current_path = Path(__file__) + def_path = current_path.parent.parent.parent.parent / 'definitions' + # Check contributed defintions + full_nxdl_path = Path(def_path, 'contributed_definitions', nxdl_file) + root = None + if full_nxdl_path.exists(): + root = ET.parse(full_nxdl_path).getroot() + else: + # Check application definition + full_nxdl_path = Path(def_path, 'applications', nxdl_file) + + if root is None and full_nxdl_path.exists(): + root = ET.parse(full_nxdl_path).getroot() + else: + full_nxdl_path = Path(def_path, 'base_classes', nxdl_file) + + if root is None and full_nxdl_path.exists(): + root = ET.parse(full_nxdl_path).getroot() + elif root is None: + raise ValueError("Need correct NXDL name") + + template = Template() + generate_template_from_nxdl(root=root, template=template) + return template + + +def get_template_from_xrd_reader(nxdl_name, file_paths): + """Get filled template from reader. + + Parameters + ---------- + nxdl_name : str + Name of nxdl definition + file_paths : Tuple[str] + Tuple of path of files. + + Returns + ------- + Template + Template which is a map from NeXus concept path to value. 
+ """ + + template = get_template_from_nxdl_name(nxdl_name) + + data = XRDReader().read(template=template, + file_paths=file_paths) + validate_data_dict(template=template, data=data, nxdl_root=ROOT) + return data + + +# pylint: disable=too-few-public-methods +class XRDReader(BaseReader): + """Reader for XRD.""" + + supported_nxdls = ["NXxrd_pan"] + + def read(self, + template: dict = None, + file_paths: Tuple[str] = None, + objects: Tuple[Any] = None): + """General read menthod to prepare the template.""" + + if not isinstance(file_paths, tuple) and not isinstance(file_paths, list): + file_paths = (file_paths,) + filled_template: Union[Dict, None] = Template() + eln_dict: Union[Dict[str, Any], None] = None + config_dict: Dict = {} + xrd_file: str = "" + xrd_file_ext: str = "" + for file in file_paths: + ext = "".join(Path(file).suffixes) + if ext == '.json': + with open(file, mode="r", encoding="utf-8") as fl_obj: + config_dict = json.load(fl_obj) + elif ext in ['.yaml', '.yml']: + with open(file, mode="r", encoding="utf-8") as fl_obj: + eln_dict = flatten_and_replace( + FlattenSettings( + yaml.safe_load(fl_obj), + CONVERT_DICT, REPLACE_NESTED + ) + ) + elif ext in XRD_FILE_EXTENSIONS: + xrd_file_ext = ext + xrd_file = file + if xrd_file: + parse_and_fill_template(template, xrd_file, config_dict, eln_dict) + else: + raise ValueError(f"Allowed XRD experimental with extenstion from" + f" {XRD_FILE_EXTENSIONS} found {xrd_file_ext}") + + # Get rid of empty concept and cleaning up Template + for key, val in template.items(): + + if val is None: + del template[key] + else: + filled_template[key] = val + if not filled_template.keys(): + raise ValueError("Reader could not read anything! Check for input files and the" + " corresponding extention.") + return filled_template + + +READER = XRDReader diff --git a/pynxtools/dataconverter/readers/xrd/xrd_helper.py b/pynxtools/dataconverter/readers/xrd/xrd_helper.py new file mode 100644 index 000000000..40874be50 --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/xrd_helper.py @@ -0,0 +1,293 @@ +"""XRD helper stuffs.""" + +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import warnings +import numpy as np +from pynxtools.dataconverter.helpers import transform_to_intended_dt +from pynxtools.dataconverter.template import Template + + +class KeyValueNotFoundWaring(Warning): + """New Wanrning class""" + + +def get_a_value_or_warn(return_value="", + warning_catagory=KeyValueNotFoundWaring, + message="Key-value not found.", + stack_level=2): + """It returns a value that and rase the warning massage.""" + + warnings.warn(f"\033[1;31m {message}:\033[0m]", warning_catagory, stack_level) + return return_value + + +def check_unit(unit: str): + """Handle conflicted unit. + Some units comes with verdor file that do not follow correct format. 
+ """ + if unit is None: + return unit + unit_map = {'Angstrom': '\u212B', + } + correct_unit = unit_map.get(unit, None) + if correct_unit is None: + return unit + return correct_unit + + +# pylint: disable=too-many-statements +def feed_xrdml_to_template(template, xrd_dict, eln_dict, file_term, config_dict=None): + """Fill template with data from xrdml type file. + + Parameters + ---------- + template : Dict + Template generated from nxdl definition file. + xrd_dict : dict + Just a dict mapping slash separated key to the data. The key is equivalent to the + path directing the location in data file. + eln_dict : dict + That brings the data from user especially using NeXus according to NeXus concept. + file_term : str + Terminological string to describe file ext. and version (e.g. xrdml_1.5) to find proper + dict from config file. + config_dict : Dict + Dictionary from config file that maps NeXus concept to data from different data file + versions. E.g. + { + "/ENTRY[entry]/2theta_plot/chi": {"file_exp": {"value": "", + "@units": ""},}, + "/ENTRY[entry]/2theta_plot/intensity": {"file_exp": {"value": "/detector", + "@units": ""},} + } + """ + + def fill_template_from_config_data(config_dict: dict, template: Template, + xrd_dict: dict, file_term: str) -> None: + """ + Parameters + ---------- + config_dict : dict + Python dict that is nested dict for different file versions. + e.g. + {"/ENTRY[entry]/2theta_plot/chi": {"file_exp": {"value": "", + "@units": ""},}, + "/ENTRY[entry]/2theta_plot/intensity": {"file_exp": {"value": "/detector", + "@units": ""},} + } + template : Template + + Return + ------ + None + """ + for nx_key, val in config_dict.items(): + if isinstance(val, dict): + raw_data_des: dict = val.get(file_term, None) + if raw_data_des is None: + raise ValueError(f"conflict file config file does not have any data map" + f" for file {file_term}") + # the field does not have any value + if not raw_data_des.get('value', None): + continue + # Note: path is the data path in raw file + for val_atr_key, path in raw_data_des.items(): + # data or field val + if val_atr_key == 'value': + template[nx_key] = xrd_dict.get(path, None) + elif path and val_atr_key == '@units': + template[nx_key + '/' + val_atr_key] = check_unit( + xrd_dict.get(path, None)) + # attr e.g. 
@AXISNAME + elif path and val_atr_key.startswith('@'): + template[nx_key + '/' + val_atr_key] = xrd_dict.get(path, None) + if not isinstance(val, dict) and isinstance(val, str): + template[nx_key] = val + + def two_theta_plot(): + + intesity = transform_to_intended_dt(template.get("/ENTRY[entry]/2theta_plot/intensity", + None)) + if intesity is not None: + intsity_len = np.shape(intesity)[0] + else: + raise ValueError("No intensity is found") + + two_theta_gr = "/ENTRY[entry]/2theta_plot/" + if template.get(f"{two_theta_gr}omega", None) is None: + omega_start = template.get("/ENTRY[entry]/COLLECTION[collection]/omega/start", None) + omega_end = template.get("/ENTRY[entry]/COLLECTION[collection]/omega/end", None) + + template["/ENTRY[entry]/2theta_plot/omega"] = np.linspace(float(omega_start), + float(omega_end), + intsity_len) + + if template.get(f"{two_theta_gr}two_theta", None) is None: + tw_theta_start = template.get("/ENTRY[entry]/COLLECTION[collection]/2theta/start", + None) + tw_theta_end = template.get("/ENTRY[entry]/COLLECTION[collection]/2theta/end", None) + template[f"{two_theta_gr}two_theta"] = np.linspace(float(tw_theta_start), + float(tw_theta_end), + intsity_len) + template[two_theta_gr + "/" + "@axes"] = ["two_theta"] + template[two_theta_gr + "/" + "@signal"] = "intensity" + + def q_plot(): + q_plot_gr = "/ENTRY[entry]/q_plot" + alpha_2 = template.get("/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two", + None) + alpha_1 = template.get("/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_one", + None) + two_theta: np.ndarray = template.get("/ENTRY[entry]/2theta_plot/two_theta", None) + if two_theta is None: + raise ValueError("Two-theta data is not found") + if isinstance(two_theta, np.ndarray): + theta: np.ndarray = two_theta / 2 + ratio_k = "/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/ratio_k_alphatwo_k_alphaone" + if alpha_1 and alpha_2: + ratio = alpha_2 / alpha_1 + template[ratio_k] = ratio + lamda = ratio * alpha_1 + (1 - ratio) * alpha_2 + q_vec = (4 * np.pi / lamda) * np.sin(np.deg2rad(theta)) + template[q_plot_gr + "/" + "q_vec"] = q_vec + template[q_plot_gr + "/" + "@q_vec_indicies"] = 0 + template[q_plot_gr + "/" + "@axes"] = ["q_vec"] + + template[q_plot_gr + "/" + "@signal"] = "intensity" + + def handle_special_fields(): + """Some fields need special treatment.""" + + key = "/ENTRY[entry]/COLLECTION[collection]/goniometer_x" + gonio_x = template.get(key, None) + + template[key] = gonio_x[0] if (isinstance(gonio_x, np.ndarray) + and gonio_x.shape == (1,)) else gonio_x + + key = "/ENTRY[entry]/COLLECTION[collection]/goniometer_y" + gonio_y = template.get(key, None) + + template[key] = gonio_y[0] if (isinstance(gonio_y, np.ndarray) + and gonio_y.shape == (1,)) else gonio_y + + key = "/ENTRY[entry]/COLLECTION[collection]/goniometer_z" + gonio_z = template.get(key, None) + + template[key] = gonio_z[0] if (isinstance(gonio_z, np.ndarray) + and gonio_z.shape == (1,)) else gonio_z + + key = "/ENTRY[entry]/COLLECTION[collection]/count_time" + count_time = template.get(key, None) + + template[key] = count_time[0] if (isinstance(count_time, np.ndarray) + and count_time.shape == (1,)) else count_time + + fill_template_from_config_data(config_dict, template, + xrd_dict, file_term) + two_theta_plot() + q_plot() + handle_special_fields() + + fill_template_from_eln_data(eln_dict, template) + + +# pylint: disable=unused-argument +def feed_udf_to_template(template, xrd_dict, eln_dict, config_dict): + """_summary_ + + Parameters + ---------- + 
template : _type_ + _description_ + xrd_dict : _type_ + _description_ + eln_dict : _type_ + _description_ + config_dict : _type_ + _description_ + """ + + +def feed_raw_to_template(template, xrd_dict, eln_dict, config_dict): + """_summary_ + + Parameters + ---------- + template : _type_ + _description_ + xrd_dict : _type_ + _description_ + eln_dict : _type_ + _description_ + config_dict : _type_ + _description_ + """ + + +def feed_xye_to_template(template, xrd_dict, eln_dict, config_dict): + """_summary_ + + Parameters + ---------- + template : _type_ + _description_ + xrd_dict : _type_ + _description_ + eln_dict : _type_ + _description_ + config_dict : _type_ + _description_ + """ + + +def fill_template_from_eln_data(eln_data_dict, template): + """Fill out the template from dict that generated from eln yaml file. + Parameters: + ----------- + eln_data_dict : dict[str, Any] + Python dictionary from eln file. + template : dict[str, Any] + Return: + ------- + None + """ + + if eln_data_dict is None: + return + for e_key, e_val in eln_data_dict.items(): + template[e_key] = transform_to_intended_dt(e_val) + + +def fill_nxdata_from_xrdml(template, + xrd_flattend_dict, + dt_nevigator_from_config_file, + data_group_concept + ): + """_summary_ + + Parameters + ---------- + template : _type_ + _description_ + xrd_flattend_dict : _type_ + _description_ + dt_nevigator_from_config_file : _type_ + _description_ + data_group_concept : _type_ + _description_ + """ diff --git a/pynxtools/dataconverter/readers/xrd/xrd_parser.py b/pynxtools/dataconverter/readers/xrd/xrd_parser.py new file mode 100644 index 000000000..9d944cad7 --- /dev/null +++ b/pynxtools/dataconverter/readers/xrd/xrd_parser.py @@ -0,0 +1,448 @@ +""" +XRD file parser collection. +""" + +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Dict, Tuple, Optional, List + +from pathlib import Path +import warnings +import xml.etree.ElementTree as ET # for XML parsing +from pynxtools.dataconverter.helpers import transform_to_intended_dt, remove_namespace_from_tag +from pynxtools.dataconverter.readers.xrd.xrd_helper import feed_xrdml_to_template + + +def fill_slash_sep_dict_from_nested_dict(parent_path: str, nested_dict: dict, + slash_sep_dict: dict): + """Convert a nested dict into slash separated dict. + + Extend slash_sep_dict by key (slash separated key) from nested dict. + + Parameters + ---------- + parent_path : str + Parent path to be appended at the starting of slash separated key. + nested_dict : dict + Dict nesting other dict. + slash_sep_dict : dict + Plain dict to be extended by key value generated from nested_dict. 
+ """ + for key, val in nested_dict.items(): + slash_sep_path = parent_path + key + if isinstance(val, dict): + fill_slash_sep_dict_from_nested_dict(slash_sep_path, val, slash_sep_dict) + else: + slash_sep_dict[slash_sep_path] = val + + +class IgnoreNodeTextWarning(Warning): + """Special class to warn node text skip.""" + + +class XRDMLParser: + """Parser for xrdml file with the help of other XRD library e.g. panalytical_xml.""" + + def __init__(self, file_path): + """Construct XRDMLParser obj. + + Parameters + ---------- + file_path : str + Path of the file. + """ + # In future it can be utilised later it different versions of file + # self.__version = None + self.__xrd_dict = {} + self.__file_path = file_path + self.xrdml_version: str = "" + self.xml_root = ET.parse(self.__file_path).getroot() + self.find_version() + # Important note for key-val pair separator list: preceding elements have precedence on the + # on the following elements + self.key_val_pair_sprtr = (';', ',') + # Important note for key-val separator list: preceding elements have precedence on the + # on the following elements + self.key_val_sprtr = ('=', ':') + + def find_version(self): + """To find xrdml file version.""" + schema_loc = "{http://www.w3.org/2001/XMLSchema-instance}schemaLocation" + # str: 'http://www.xrdml.com/XRDMeasurement/1.5 + version = self.xml_root.get(schema_loc).split(' ')[0] + self.xrdml_version = version.split('/')[-1] + + def get_slash_separated_xrd_dict(self): + """Return a dict with slash separated key and value from xrd file. + + The key is the slash separated string path for nested xml elements. + + Returns + ------- + dict: + Dictionary where key maps xml nested elements by slash separated str. + """ + # To navigate different functions in future according to some parameters + # such as version, and data analysis module from panalytical_xml + self.handle_with_panalytical_module() + return self.__xrd_dict + + def handle_with_panalytical_module(self): + """Handeling XRDml file by parsing xml file and Pnanalytical_xml parser + + Panalytical module extends and constructs some array data from experiment settings + comes with xml file. + """ + self.parse_each_elm(parent_path='/', xml_node=self.xml_root) + nested_data_dict: Dict[str, any] = {} + # Note: To use panalytical lib + # Extract other numerical data e.g. 'hkl', 'Omega', '2Theta', CountTime etc + # using panalytical_xml module + # parsed_data = XRDMLFile(self.__file_path) + # nested_data_dict = parsed_data.scan.ddict + fill_slash_sep_dict_from_nested_dict('/', nested_data_dict, self.__xrd_dict) + + def process_node_text(self, parent_path, node_txt) -> None: + """Processing text of node + + Parameters + ---------- + parent_path : str + Starting str of the key when forming a string key. + node_txt : str + text from node. 
+ + Returns + ------ + None + """ + key_val_pairs = [] + # get key-val pair + for sep in self.key_val_pair_sprtr: + if sep in node_txt: + key_val_pairs.extend(node_txt.split(sep)) + break + # Separate key-val, build full path and + # store them in dict + if key_val_pairs: + for key_val in key_val_pairs: + for k_v_sep in self.key_val_sprtr: + if k_v_sep in key_val: + key, val = key_val.split(k_v_sep) + key = key.replace(' ', '') + self.__xrd_dict['/'.join([parent_path, key])] = val + break + # Handling array data comes as node text + else: + try: + self.__xrd_dict[parent_path] = transform_to_intended_dt(node_txt) + except ValueError: + warnings.warn(f'Element text {node_txt} is ignored from parseing!', + IgnoreNodeTextWarning) + + def parse_each_elm(self, parent_path, xml_node, + multi_childs_tag: str = '', + tag_extensions: Optional[List[int]] = None): + """Check each xml element and send the element to intended function. + + Parameters + ---------- + parent_path : str + Path to be in the starting of the key composing from element e.g. '/'. + xml_node : XML.Element + Any element except process instruction nodes. + multi_childs_tag : str + Tag that is available on several child nodes. + tag_extension : List[int] + List of extension of the child tag if there are several childs having the same + tag. + + Returns + ------ + None + """ + + tag = remove_namespace_from_tag(xml_node.tag) + # Take care of special node of 'entry' tag + if tag == 'entry': + parent_path = self.parse_entry_elm(parent_path, xml_node, + multi_childs_tag, tag_extensions) + else: + parent_path = self.parse_general_elm(parent_path, xml_node, + multi_childs_tag, tag_extensions) + + _, multi_childs_tag = self.has_multi_childs_with_same_tag(xml_node) + # List of tag extensions for child nodes which have the same tag. + tag_extensions = [0] + for child in iter(xml_node): + if child is not None: + self.parse_each_elm(parent_path, child, + multi_childs_tag, tag_extensions) + + def has_multi_childs_with_same_tag(self, parent_node: ET.Element) -> Tuple[bool, str]: + """Check for multiple childs that have the same tag. + + Parameter: + ---------- + parent_node : ET.Element + Parent node that might has multiple childs with the same tag. + + Returns: + -------- + Tuple[bool, str] + (true if multiple childs with the same tag, tag). + """ + tag: str = None + for child in iter(parent_node): + temp_tag = remove_namespace_from_tag(child.tag) + if tag is None: + tag = temp_tag + else: + if tag == temp_tag: + return (True, tag) + + return (False, '') + + def parse_general_elm(self, parent_path, xml_node, + multi_childs_tag, tag_extensions: List[int]): + """Handle general element except entry element. + Parameters + ---------- + parent_path : str + Path to be in the starting of the key composing from element e.g. '/'. + xml_node : XML.Element + Any element except process instruction and entry nodes. + multi_childs_tag : str + Tag that is available on several siblings. + tag_extension : List[int] + List of extension of the shiblings tag if there are several shiblings having + the same tag. 
+ + Returns + ------- + None + """ + + tag = remove_namespace_from_tag(xml_node.tag) + if tag == multi_childs_tag: + new_ext = tag_extensions[-1] + 1 + tag = tag + '_' + str(new_ext) + tag_extensions.append(new_ext) + + if parent_path == '/': + parent_path = parent_path + tag + else: + # New parent path ends with element tag + parent_path = '/'.join([parent_path, tag]) + + node_attr = xml_node.attrib + if node_attr: + for key, val in node_attr.items(): + # Some attr has namespace + key = remove_namespace_from_tag(key) + key = key.replace(' ', '_') + path_extend = '/'.join([parent_path, key]) + self.__xrd_dict[path_extend] = val + + node_txt = xml_node.text + if node_txt: + self.process_node_text(parent_path, node_txt) + + return parent_path + + def parse_entry_elm(self, parent_path: str, xml_node: ET.Element, + multi_childs_tag: str, tag_extensions: List[int]): + """Handle entry element. + + Parameters + ---------- + parent_path : str + Path to be in the starting of the key composing from element e.g. '/'. + xml_node : XML.Element + Any entry node. + multi_childs_tag : str + Tag that is available on several siblings. + tag_extension : List[int] + List of extension of the shiblings tag if there are several shiblings having + the same tag. + + Returns + ------- + str: + Parent path. + """ + + tag = remove_namespace_from_tag(xml_node.tag) + + if tag == multi_childs_tag: + new_ext = tag_extensions[-1] + 1 + tag_extensions.append(new_ext) + tag = tag + '_' + str(new_ext) + + if parent_path == '/': + parent_path = '/' + tag + else: + # Parent path ends with element tag + parent_path = '/'.join([parent_path, tag]) + + node_attr = xml_node.attrib + if node_attr: + for key, val in node_attr.items(): + # Some attributes have namespace + key = remove_namespace_from_tag(key) + path_extend = '/'.join([parent_path, key]) + self.__xrd_dict[path_extend] = val + + # In entry element text must get special care on it + node_txt = xml_node.text + if node_txt: + self.process_node_text(parent_path, node_txt) + + return parent_path + + +class FormatParser: + """A class to identify and parse different file formats.""" + + def __init__(self, file_path): + """Construct FormatParser obj. + + Parameters + ---------- + file_path : str + XRD file to be parsed. + + Returns + ------- + None + """ + self.file_path = file_path + self.file_parser = XRDMLParser(self.file_path) + # termilnological name of file to read config file + self.file_term = 'xrdml_' + self.file_parser.xrdml_version + + def get_file_format(self): + """Identifies the format of a given file. + + Returns: + -------- + str: + The file extension of the file. + """ + file_extension = ''.join(Path(self.file_path).suffixes) + return file_extension + + def parse_xrdml(self): + """Parses a Panalytical XRDML file. + + Returns + ------- + dict + A dictionary containing the parsed XRDML data. + """ + return self.file_parser.get_slash_separated_xrd_dict() + + def parse_panalytical_udf(self): + """Parse the Panalytical .udf file. + + Returns + ------- + None + Placeholder for parsing .udf files. + """ + + def parse_bruker_raw(self): + """Parse the Bruker .raw file. + + Returns + None + """ + + def parse_bruker_xye(self): + """Parse the Bruker .xye file. + + Returns + None + """ + + # pylint: disable=import-outside-toplevel + def parse_and_populate_template(self, template, config_dict, eln_dict): + """Parse xrd file into dict and fill the template. + + Parameters + ---------- + template : Template + NeXus template generated from NeXus application definitions. 
+        config_dict : dict
+            A dict generated from the Python config file.
+        eln_dict : dict
+            A dict generated from the ELN yaml file.
+
+        Returns
+        -------
+        None
+        """
+
+        xrd_dict = self.parse()
+        if len(config_dict) == 0 and self.file_parser.xrdml_version == '1.5':
+            from pynxtools.dataconverter.readers.xrd.config import xrdml
+            config_dict = xrdml
+        feed_xrdml_to_template(template, xrd_dict, eln_dict,
+                               file_term=self.file_term, config_dict=config_dict)
+
+    def parse(self):
+        '''Parses the file based on its format.
+
+        Returns:
+        dict
+            A dictionary containing the parsed data.
+
+        Raises:
+        ValueError: If the file format is unsupported.
+        '''
+        file_format = self.get_file_format()
+        slash_sep_dict = {}
+        if file_format == ".xrdml":
+            slash_sep_dict = self.parse_xrdml()
+        # elif file_format == ".udf":
+        #     return self.parse_panalytical_udf()
+        # elif file_format == ".raw":
+        #     return self.parse_bruker_raw()
+        # elif file_format == ".xye":
+        #     return self.parse_bruker_xye()
+        # else:
+        #     raise ValueError(f"Unsupported file format: {file_format}")
+        return slash_sep_dict
+
+
+def parse_and_fill_template(template, xrd_file, config_dict, eln_dict):
+    """Parse xrd file and fill the template with data from that file.
+
+    Parameters
+    ----------
+    template : Template[dict]
+        Template generated from the NXDL definition.
+    xrd_file : str
+        Name of the XRD file with extension.
+    config_dict : Dict
+        Dictionary from config.json or a similar file.
+    eln_dict : Dict
+        Plain, '/'-separated dictionary from the ELN yaml file.
+    """
+
+    format_parser = FormatParser(xrd_file)
+    format_parser.parse_and_populate_template(template, config_dict, eln_dict)
diff --git a/pynxtools/dataconverter/template.py b/pynxtools/dataconverter/template.py
index 286cbaaed..fa6907d36 100644
--- a/pynxtools/dataconverter/template.py
+++ b/pynxtools/dataconverter/template.py
@@ -114,6 +114,24 @@ def get_documented(self):
         """Returns a dictionary of all the optionalities merged into one."""
         return {**self.optional, **self.recommended, **self.required}
 
+    def __contains__(self, k):
+        """
+        Supports the `in` operator for nested Template keys.
+        """
+        return any([
+            k in self.optional,
+            k in self.recommended,
+            k in self.undocumented,
+            k in self.required
+        ])
+
+    def get(self, key: str, default=None):
+        """Proxies the get function to our internal __getitem__"""
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
     def __getitem__(self, k):
         """Handles how values are accessed from the Template object."""
         # Try setting item in all else throw error. Does not append to default.
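The two methods added above give `Template` a dict-like membership test and a safe accessor across its optionality buckets. A minimal usage sketch (the key below is purely illustrative, not taken from any application definition):

```python
from pynxtools.dataconverter.template import Template

template = Template()
template["/ENTRY[entry]/title"] = "XRD scan"  # illustrative key; new keys typically land in the undocumented bucket

# __contains__ looks through the optional, recommended, required and undocumented keys
if "/ENTRY[entry]/title" in template:
    # get() proxies __getitem__ and returns the default instead of raising KeyError
    title = template.get("/ENTRY[entry]/title", "untitled")
```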
@@ -130,7 +148,10 @@ def __getitem__(self, k): return self.required[k] except KeyError: return self.undocumented[k] - return self.get_optionality(k) + if k in ("required", "optional", "recommended", "undocumented"): + return self.get_optionality(k) + raise KeyError("Only paths starting with '/' or one of [optional_parents, " + "lone_groups, required, optional, recommended, undocumented] can be used.") def clear(self): """Clears all data stored in the Template object.""" @@ -171,12 +192,15 @@ def add_entry(self, entry_name): def __delitem__(self, key): """Delete a dictionary key or template key""" - if key in self.optional.keys(): del self.optional[key] - if key in self.required.keys(): + elif key in self.required.keys(): del self.required[key] - if key in self.recommended.keys(): + elif key in self.recommended.keys(): del self.recommended[key] + elif key in self.undocumented.keys(): + del self.undocumented[key] + else: + raise KeyError(f"{key} does not exist.") diff --git a/pynxtools/dataconverter/writer.py b/pynxtools/dataconverter/writer.py index 486d48ace..81b3045da 100644 --- a/pynxtools/dataconverter/writer.py +++ b/pynxtools/dataconverter/writer.py @@ -105,6 +105,7 @@ def handle_shape_entries(data, file, path): return layout +# pylint: disable=too-many-locals, inconsistent-return-statements def handle_dicts_entries(data, grp, entry_name, output_path, path): """Handle function for dictionaries found as value of the nexus file. @@ -163,7 +164,13 @@ def handle_dicts_entries(data, grp, entry_name, output_path, path): raise InvalidDictProvided("A dictionary was provided to the template but it didn't" " fall into any of the know cases of handling" " dictionaries. This occured for: " + entry_name) - return grp[entry_name] + # Check whether link has been stabilished or not + try: + return grp[entry_name] + except KeyError: + logger.warning("No path '%s' available to be linked.", path) + del grp[entry_name] + return None class Writer: @@ -171,26 +178,27 @@ class Writer: Args: data (dict): Dictionary containing the data to convert. - nxdl_path (str): Path to the nxdl file to use during conversion. + nxdl_f_path (str): Path to the nxdl file to use during conversion. output_path (str): Path to the output NeXus file. Attributes: data (dict): Dictionary containing the data to convert. - nxdl_path (str): Path to the nxdl file to use during conversion. + nxdl_f_path (str): Path to the nxdl file to use during conversion. output_path (str): Path to the output NeXus file. output_nexus (h5py.File): The h5py file object to manipulate output file. nxdl_data (dict): Stores xml data from given nxdl file to use during conversion. nxs_namespace (str): The namespace used in the NXDL tags. Helps search for XML children. 
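        A minimal usage sketch (the file names below are placeholders only)::

            writer = Writer(data=template, nxdl_f_path="NXmpes.nxdl.xml",
                            output_path="output.nxs")
            writer.write()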
""" - def __init__(self, data: dict = None, nxdl_path: str = None, - output_path: str = None, io_mode: str = "w"): + def __init__(self, data: dict = None, + nxdl_f_path: str = None, + output_path: str = None): """Constructs the necessary objects required by the Writer class.""" self.data = data - self.nxdl_path = nxdl_path + self.nxdl_f_path = nxdl_f_path self.output_path = output_path - self.output_nexus = h5py.File(self.output_path, io_mode) - self.nxdl_data = ET.parse(self.nxdl_path).getroot() + self.output_nexus = h5py.File(self.output_path, "w") + self.nxdl_data = ET.parse(self.nxdl_f_path).getroot() self.nxs_namespace = get_namespace(self.nxdl_data) def __nxdl_to_attrs(self, path: str = '/') -> dict: @@ -235,8 +243,9 @@ def ensure_and_get_parent_node(self, path: str, undocumented_paths) -> h5py.Grou return grp return self.output_nexus[parent_path_hdf5] - def write(self): - """Writes the NeXus file with previously validated data from the reader with NXDL attrs.""" + def _put_data_into_hdf5(self): + """Store data in hdf5 in in-memory file or file.""" + hdf5_links_for_later = [] def add_units_key(dataset, path): @@ -274,6 +283,9 @@ def add_units_key(dataset, path): for links in hdf5_links_for_later: dataset = handle_dicts_entries(*links) + if dataset is None: + # If target of a link is invalid to be linked + del self.data[links[-1]] for path, value in self.data.items(): try: @@ -288,6 +300,7 @@ def add_units_key(dataset, path): if entry_name[0] != "@": path_hdf5 = helpers.convert_data_dict_path_to_hdf5_path(path) + add_units_key(self.output_nexus[path_hdf5], path) else: # consider changing the name here the lvalue can also be group! @@ -297,4 +310,9 @@ def add_units_key(dataset, path): raise IOError(f"Unknown error occured writing the path: {path} " f"with the following message: {str(exc)}") from exc - self.output_nexus.close() + def write(self): + """Writes the NeXus file with previously validated data from the reader with NXDL attrs.""" + try: + self._put_data_into_hdf5() + finally: + self.output_nexus.close() diff --git a/pynxtools/eln_mapper/README.md b/pynxtools/eln_mapper/README.md new file mode 100644 index 000000000..13f759466 --- /dev/null +++ b/pynxtools/eln_mapper/README.md @@ -0,0 +1,19 @@ +# ELN generator +This is a helper tool for generating eln +- The simple eln generator that can be used in a console or jupyter-notebook +- Scheme based eln generator that can be used in NOMAD and the eln can be used as a custom scheme in NOMAD. + +``` +$ eln_generator --options + +Options: + --nxdl TEXT Name of NeXus definition without extension + (.nxdl.xml). [required] + --skip-top-levels INTEGER To skip upto a level of parent hierarchical structure. + E.g. for default 1 the part Entry[ENTRY] from + /Entry[ENTRY]/Instrument[INSTRUMENT]/... will + be skiped. [default: 1] + --output-file TEXT Name of output file. + --eln-type [eln|scheme_eln] Choose a type from the eln or scheme_eln. [required] + --help Show this message and exit. +``` diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_utils.py b/pynxtools/eln_mapper/__init__.py similarity index 59% rename from pynxtools/dataconverter/readers/apm/utils/apm_utils.py rename to pynxtools/eln_mapper/__init__.py index f04c329ee..7f1819634 100644 --- a/pynxtools/dataconverter/readers/apm/utils/apm_utils.py +++ b/pynxtools/eln_mapper/__init__.py @@ -1,4 +1,3 @@ -# # Copyright The NOMAD Authors. # # This file is part of NOMAD. See https://nomad-lab.eu for further info. 
@@ -15,12 +14,3 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Set of utility tools for parsing file formats used by atom probe."""
-
-# pylint: disable=no-member
-
-# ifes_apt_tc_data_modeling replaces now the previously here stored
-# convenience functions which translated human-readable ion names into
-# isotope_vector descriptions and vice versa as proposed by M. Kuehbach et al. in
-# DOI: 10.1017/S1431927621012241 to the human-readable ion names which are use
-# in P. Felfer et al."s atom probe toolbox
diff --git a/pynxtools/eln_mapper/eln.py b/pynxtools/eln_mapper/eln.py
new file mode 100644
index 000000000..078dd4d18
--- /dev/null
+++ b/pynxtools/eln_mapper/eln.py
@@ -0,0 +1,189 @@
+"""Functions that directly or indirectly help to render an ELN.
+Note that this module does not produce the schema ELN that is rendered in NOMAD,
+but the plain ELN data file that accompanies such a schema."""
+
+# Copyright The NOMAD Authors.
+#
+# This file is part of NOMAD. See https://nomad-lab.eu for further info.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import re
+from typing import Any, Dict
+import xml.etree.ElementTree as ET
+import yaml
+
+from pynxtools.dataconverter.helpers import generate_template_from_nxdl
+from pynxtools.dataconverter.template import Template
+from pynxtools.nexus.nexus import get_nexus_definitions_path
+
+
+def retrieve_nxdl_file(nexus_def: str) -> str:
+    """Retrieve the full path of the NeXus definition file.
+
+    Parameters
+    ----------
+    nexus_def : str
+        Name of the NeXus definition, e.g. NXmpes
+
+    Returns
+    -------
+    str
+        Full path of the definition file, e.g. /NXmpes.nxdl.xml
+
+    Raises
+    ------
+    ValueError
+        If no definition file with the given name can be found.
+    """
+    definition_path = get_nexus_definitions_path()
+
+    def_path = os.path.join(definition_path,
+                            'contributed_definitions',
+                            f"{nexus_def}.nxdl.xml")
+    if os.path.exists(def_path):
+        return def_path
+
+    def_path = os.path.join(definition_path,
+                            'base_classes',
+                            f"{nexus_def}.nxdl.xml")
+
+    if os.path.exists(def_path):
+        return def_path
+
+    def_path = os.path.join(definition_path,
+                            'applications',
+                            f"{nexus_def}.nxdl.xml")
+    if os.path.exists(def_path):
+        return def_path
+
+    raise ValueError("No NXDL file found for this definition; "
+                     "give the bare definition name, e.g. NXmpes, not NXmpes.nxdl.xml.")
+
+
+def get_empty_template(nexus_def: str) -> Template:
+    """Generate an empty template from the given NeXus definition.
+
+    Parameters
+    ----------
+    nexus_def : str
+        Name of NeXus definition e.g.
NXmpes + + Return + ------ + Template + """ + + nxdl_file = retrieve_nxdl_file(nexus_def) + nxdl_root = ET.parse(nxdl_file).getroot() + template = Template() + generate_template_from_nxdl(nxdl_root, template) + + return template + + +def take_care_of_special_concepts(key: str): + """For some special concepts such as @units.""" + def unit_concept(): + return {'value': None, + 'unit': None} + + if key == '@units': + return unit_concept() + + +def get_recursive_dict(concatenated_key: str, + recursive_dict: Dict[str, Any], + level_to_skip: int) -> None: + """Get recursive dict for concatenated string of keys. + + Parameters + ---------- + concatenated_key : str + String of keys separated by slash + recursive_dict : dict + Dict to recursively stroring data. + level_to_skip : int + Integer to skip the level of hierarchical level + """ + # splitig keys like: '/entry[ENTRY]/position[POSITION]/xx'. + # skiping the first empty '' and top parts as directed by users. + key_li = concatenated_key.split('/')[level_to_skip + 1:] + # list of key for special consideration + sp_key_li = ['@units'] + last_key = "" + last_dict = {} + for key in key_li: + if '[' in key and '/' not in key: + key = re.findall(r'\[(.*?)\]', key,)[0].capitalize() + if not key: + continue + last_key = key + last_dict = recursive_dict + if key in recursive_dict: + if recursive_dict[key] is None: + recursive_dict[key] = {} + recursive_dict = recursive_dict[key] + + else: + if key in sp_key_li: + recursive_dict.update(take_care_of_special_concepts(key)) + else: + recursive_dict = recursive_dict[key] + else: + if key in sp_key_li: + recursive_dict.update(take_care_of_special_concepts(key)) + else: + recursive_dict[key] = {} + recursive_dict = recursive_dict[key] + # For special key cleaning parts occurs inside take_care_of_special_concepts func. + if last_key not in sp_key_li: + last_dict[last_key] = None + + +def generate_eln(nexus_def: str, eln_file: str = '', level_to_skip: int = 1) -> None: + """Genrate eln from application definition. + + Parameters + ---------- + nexus_def : str + _description_ + eln_file : str + _description_ + + Returns: + None + """ + + template = get_empty_template(nexus_def) + recursive_dict: Dict[str, Any] = {} + for key, _ in template.items(): + get_recursive_dict(key, recursive_dict, level_to_skip) + + name_split = eln_file.rsplit('.') + if not eln_file: + if nexus_def[0:2] == 'NX': + raw_name = nexus_def[2:] + eln_file = raw_name + '.yaml' + + elif len(name_split) == 1: + eln_file = eln_file + '.yaml' + + elif len(name_split) == 2 and name_split[1] == 'yaml': + pass + else: + raise ValueError("Eln file should come with 'yaml' extension or without extension.") + + with open(eln_file, encoding='utf-8', mode='w') as eln_f: + yaml.dump(recursive_dict, sort_keys=False, stream=eln_f) diff --git a/pynxtools/eln_mapper/eln_mapper.py b/pynxtools/eln_mapper/eln_mapper.py new file mode 100644 index 000000000..d23918f73 --- /dev/null +++ b/pynxtools/eln_mapper/eln_mapper.py @@ -0,0 +1,75 @@ +"""This module Generate ELN in a hierarchical format according to NEXUS definition.""" +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import click +from pynxtools.eln_mapper.eln import generate_eln +from pynxtools.eln_mapper.scheme_eln import generate_scheme_eln + + +@click.command() +@click.option( + '--nxdl', + required=True, + help="Name of NeXus definition without extension (.nxdl.xml)." +) +@click.option( + '--skip-top-levels', + default=1, + required=False, + type=int, + show_default=True, + help=("To skip the level of parent hierarchy level. E.g. for default 1 the part" + "Entry[ENTRY] from /Entry[ENTRY]/Instrument[INSTRUMENT]/... will be skiped.") +) +@click.option( + '--output-file', + required=False, + default='eln_data', + help=('Name of file that is neede to generated output file.') +) +@click.option( + '--eln-type', + required=True, + type=click.Choice(['eln', 'scheme_eln'], case_sensitive=False), + default='eln' +) +def get_eln(nxdl: str, + skip_top_levels: int, + output_file: str, + eln_type: str): + """To generate ELN in yaml file format. + + Parameters + ---------- + + nxdl : str + Name of NeXus definition e.g. NXmpes + skip_top_levels : int + To skip hierarchical levels + output_file : str + Name of the output file. + """ + eln_type = eln_type.lower() + if eln_type == 'eln': + generate_eln(nxdl, output_file, skip_top_levels) + elif eln_type == 'scheme_eln': + generate_scheme_eln(nxdl, eln_file_name=output_file) + + +if __name__ == "__main__": + get_eln().parse() # pylint: disable=no-value-for-parameter diff --git a/pynxtools/eln_mapper/scheme_eln.py b/pynxtools/eln_mapper/scheme_eln.py new file mode 100644 index 000000000..1152bbd08 --- /dev/null +++ b/pynxtools/eln_mapper/scheme_eln.py @@ -0,0 +1,281 @@ +"""This module intended to generate schema eln which usually randeredto NOMAD.""" + +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Dict, Any +import xml.etree.ElementTree as ET +import yaml +from pynxtools.eln_mapper.eln import retrieve_nxdl_file +from pynxtools.dataconverter.helpers import remove_namespace_from_tag + + +NEXUS_TYPE_TO_NUMPY_TYPE = {'NX_CHAR': {'convert_typ': 'str', + 'component_nm': 'StringEditQuantity', + 'default_unit_display': ''}, + 'NX_BOOLEAN': {'convert_typ': 'bool', + 'component_nm': 'BoolEditQuantity', + 'default_unit_display': ''}, + 'NX_DATE_TIME': {'convert_typ': 'Datetime', + 'component_nm': 'DateTimeEditQuantity', + 'default_unit_display': ''}, + 'NX_FLOAT': {'convert_typ': 'np.float64', + 'component_nm': 'NumberEditQuantity', + 'default_unit_display': ''}, + 'NX_INT': {'convert_typ': 'int', + 'component_nm': 'NumberEditQuantity', + 'default_unit_display': ''}, + 'NX_NUMBER': {'convert_typ': 'np.float64', + 'component_nm': 'NumberEditQuantity', + 'default_unit_display': ''}, + '': {'convert_typ': '', + 'component_nm': '', + 'default_unit_display': ''}, + } + + +def construct_field_structure(fld_elem, quntities_dict): + """Construct field structure such as unit, value. + Parameters + ---------- + elem : _type_ + _description_ + quntities_dict : _type_ + _description_ + """ + elm_attr = fld_elem.attrib + fld_nm = elm_attr['name'].lower() + quntities_dict[fld_nm] = {} + fld_dict = quntities_dict[fld_nm] + + # handle type + if 'type' in elm_attr: + nx_fld_typ = elm_attr['type'] + else: + nx_fld_typ = 'NX_CHAR' + + if nx_fld_typ in NEXUS_TYPE_TO_NUMPY_TYPE: + cov_fld_typ = NEXUS_TYPE_TO_NUMPY_TYPE[nx_fld_typ]['convert_typ'] + + fld_dict['type'] = cov_fld_typ + if 'units' in elm_attr: + fld_dict['unit'] = f"" + fld_dict['value'] = "" + + # handle m_annotation + m_annotation = {'m_annotations': {'eln': + {'component': + NEXUS_TYPE_TO_NUMPY_TYPE[nx_fld_typ]['component_nm'], + 'defaultDisplayUnit': + (NEXUS_TYPE_TO_NUMPY_TYPE[nx_fld_typ] + ['default_unit_display'])}}} + fld_dict.update(m_annotation) + + # handle description + construct_decription(fld_elem, fld_dict) + + +def construct_decription(elm: ET.Element, concept_dict: Dict) -> None: + """Collect doc from concept doc. + """ + desc_text = '' + for child_elm in elm: + tag = remove_namespace_from_tag(child_elm.tag) + if tag == 'doc': + desc_text = child_elm.text + desc_text = ' '.join([x.strip() for x in desc_text.split('\n')]) + break + + concept_dict['description'] = desc_text + + +def construct_group_structure(grp_elm: ET.Element, subsections: Dict) -> None: + """To construct group structure as follows: + : + section: + m_annotations: + eln: + overview: true + + Parameters + ---------- + elm : ET.Element + Group element + subsections : Dict + Dict to include group recursively + """ + + default_m_annot = {'m_annotations': {'eln': {'overview': True}}} + + elm_attrib = grp_elm.attrib + grp_desig = "" + if 'name' in elm_attrib: + grp_desig = elm_attrib['name'].capitalize() + elif 'type' in elm_attrib: + grp_desig = elm_attrib['type'][2:].capitalize() + + subsections[grp_desig] = {} + grp_dict = subsections[grp_desig] + + # add setion in group + grp_dict['section'] = {} + section = grp_dict['section'] + section.update(default_m_annot) + + # pass the grp elment for recursive search + scan_xml_element_recursively(grp_elm, section) + + +def _should_skip_iteration(elm: ET.Element) -> bool: + """Define some elements here that should be skipped. 
+ + Parameters + ---------- + elm : ET.Element + The element to investigate to skip + """ + attr = elm.attrib + elm_type = '' + if 'type' in attr: + elm_type = attr['type'] + if elm_type in ['NXentry']: + return True + return False + + +def scan_xml_element_recursively(nxdl_element: ET.Element, + recursive_dict: Dict, + root_name: str = "", + reader_name: str = '', + is_root: bool = False) -> None: + """Scan xml elements, and pass the element to the type of element handaler. + + Parameters + ---------- + nxdl_element : ET.Element + This xml element that will be scanned through the descendants. + recursive_dict : Dict + A dict that store hierarchical structure of scheme eln. + root_name : str, optional + Name of root that user want to see to name their application, e.g. MPES, + by default 'ROOT_NAME' + reader_name : Prefered name of the reader. + is_root : bool, optional + Declar the elment as root or not, by default False + """ + + if is_root: + # Note for later: crate a new function to handle root part + nxdl = 'NX.nxdl' + recursive_dict[root_name] = {'base_sections': + ['nomad.datamodel.metainfo.eln.NexusDataConverter', + 'nomad.datamodel.data.EntryData']} + + m_annotations: Dict = {'m_annotations': {'template': {'reader': reader_name, + 'nxdl': nxdl}, + 'eln': {'hide': []}}} + + recursive_dict[root_name].update(m_annotations) + + recursive_dict = recursive_dict[root_name] + + # Define quantities for taking care of field + quantities: Dict = None + subsections: Dict = None + for elm in nxdl_element: + tag = remove_namespace_from_tag(elm.tag) + # To skip NXentry group but only consider the child elments + if _should_skip_iteration(elm): + scan_xml_element_recursively(elm, recursive_dict) + continue + if tag == 'field': + if quantities is None: + recursive_dict['quantities'] = {} + quantities = recursive_dict['quantities'] + construct_field_structure(elm, quantities) + if tag == 'group': + if subsections is None: + recursive_dict['sub_sections'] = {} + subsections = recursive_dict['sub_sections'] + construct_group_structure(elm, subsections) + + +def get_eln_recursive_dict(recursive_dict: Dict, nexus_full_file: str) -> None: + """Develop a recursive dict that has hierarchical structure of scheme eln. + + Parameters + ---------- + recursive_dict : Dict + A dict that store hierarchical structure of scheme eln. + nexus_full_file : str + Full path of NeXus file e.g. /paNXmpes.nxdl.xml + """ + + nxdl_root = ET.parse(nexus_full_file).getroot() + root_name = nxdl_root.attrib['name'][2:] if 'name' in nxdl_root.attrib else "" + recursive_dict['definitions'] = {'name': '', + 'sections': {}} + sections = recursive_dict['definitions']['sections'] + + scan_xml_element_recursively(nxdl_root, sections, + root_name=root_name, is_root=True) + + +def generate_scheme_eln(nexus_def: str, eln_file_name: str = None) -> None: + """Generate schema eln that should go to Nomad while running the reader. + The output file will be .scheme.archive.yaml + + Parameters + ---------- + nexus_def : str + Name of nexus definition e.g. NXmpes + eln_file_name : str + Name of output file e.g. mpes + + Returns: + None + """ + + file_parts: list = [] + out_file_ext = 'scheme.archive.yaml' + raw_name = "" + out_file = "" + + nxdl_file = retrieve_nxdl_file(nexus_def) + + if eln_file_name is None: + # raw_name from e.g. 
//NXmpes.nxdl.xml + raw_name = nxdl_file.split('/')[-1].split('.')[0][2:] + out_file = '.'.join([raw_name, out_file_ext]) + else: + file_parts = eln_file_name.split('.') + if len(file_parts) == 1: + raw_name = file_parts[0] + out_file = '.'.join([raw_name, out_file_ext]) + elif len(file_parts) == 4 and '.'.join(file_parts[1:]) == out_file_ext: + out_file = eln_file_name + elif nexus_def[0:2] == 'NX': + raw_name = nexus_def[2:] + out_file = '.'.join([raw_name, out_file_ext]) + else: + raise ValueError("Check for correct NeXus definition and output file name.") + + recursive_dict: Dict[str, Any] = {} + get_eln_recursive_dict(recursive_dict, nxdl_file) + + with open(out_file, mode='w', encoding='utf-8') as out_f: + yaml.dump(recursive_dict, sort_keys=False, stream=out_f) diff --git a/pynxtools/nexus/nexus.py b/pynxtools/nexus/nexus.py index 9afa711fb..ef5f64cd5 100644 --- a/pynxtools/nexus/nexus.py +++ b/pynxtools/nexus/nexus.py @@ -258,8 +258,9 @@ def get_hdf_path(hdf_info): return hdf_info['hdf_node'].name.split('/')[1:] +# pylint: disable=too-many-arguments,too-many-locals @lru_cache(maxsize=None) -def get_inherited_hdf_nodes(nx_name: str = None, elem: ET.Element = None, # pylint: disable=too-many-arguments,too-many-locals +def get_inherited_hdf_nodes(nx_name: str = None, elem: ET.Element = None, hdf_node=None, hdf_path=None, hdf_root=None, attr=False): """Returns a list of ET.Element for the given path.""" # let us start with the given definition file @@ -563,8 +564,11 @@ def hdf_node_to_self_concept_path(hdf_info, logger): class HandleNexus: """documentation""" + + # pylint: disable=too-many-instance-attributes def __init__(self, logger, nexus_file, - d_inq_nd=None, c_inq_nd=None): + d_inq_nd=None, c_inq_nd=None, + is_in_memory_file=False): self.logger = logger local_dir = os.path.abspath(os.path.dirname(__file__)) @@ -572,6 +576,7 @@ def __init__(self, logger, nexus_file, os.path.join(local_dir, '../../tests/data/nexus/201805_WSe2_arpes.nxs') self.parser = None self.in_file = None + self.is_hdf5_file_obj = is_in_memory_file self.d_inq_nd = d_inq_nd self.c_inq_nd = c_inq_nd # Aggregating hdf path corresponds to concept query node @@ -638,19 +643,28 @@ def full_visit(self, root, hdf_node, name, func): def process_nexus_master_file(self, parser): """Process a nexus master file by processing all its nodes and their attributes""" self.parser = parser - self.in_file = h5py.File( - self.input_file_name[0] - if isinstance(self.input_file_name, list) - else self.input_file_name, 'r' - ) - self.full_visit(self.in_file, self.in_file, '', self.visit_node) - if self.d_inq_nd is None and self.c_inq_nd is None: - get_default_plotable(self.in_file, self.logger) - # To log the provided concept and concepts founded - if self.c_inq_nd is not None: - for hdf_path in self.hdf_path_list_for_c_inq_nd: - self.logger.info(hdf_path) - self.in_file.close() + try: + if not self.is_hdf5_file_obj: + self.in_file = h5py.File( + self.input_file_name[0] + if isinstance(self.input_file_name, list) + else self.input_file_name, 'r' + ) + else: + self.in_file = self.input_file_name + + self.full_visit(self.in_file, self.in_file, '', self.visit_node) + + if self.d_inq_nd is None and self.c_inq_nd is None: + get_default_plotable(self.in_file, self.logger) + # To log the provided concept and concepts founded + if self.c_inq_nd is not None: + for hdf_path in self.hdf_path_list_for_c_inq_nd: + self.logger.info(hdf_path) + finally: + # To test if hdf_file is open print(self.in_file.id.valid) + self.in_file.close() + # To test if 
hdf_file is open print(self.in_file.id.valid) @click.command() diff --git a/pynxtools/nexus/nxdl_utils.py b/pynxtools/nexus/nxdl_utils.py index 706390a7c..aa64d5caa 100644 --- a/pynxtools/nexus/nxdl_utils.py +++ b/pynxtools/nexus/nxdl_utils.py @@ -701,6 +701,9 @@ def get_node_at_nxdl_path(nxdl_path: str = None, we are looking for or the root elem from a previously loaded NXDL file and finds the corresponding XML element with the needed attributes.""" try: + if nxdl_path.count("/") == 1 and nxdl_path not in ("/ENTRY", "/entry"): + elem = None + nx_name = "NXroot" (class_path, nxdlpath, elist) = get_inherited_nodes(nxdl_path, nx_name, elem) except ValueError as value_error: if exc: diff --git a/pyproject.toml b/pyproject.toml index 93917652e..d2c7853f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,7 @@ [build-system] requires = ["setuptools>=64.0.1", "setuptools-scm[toml]>=6.2"] -build-backend = "setuptools.build_meta" +backend-path = ["pynxtools"] +build-backend = "_build_wrapper" [project] name = "pynxtools" @@ -8,14 +9,15 @@ dynamic = ["version"] authors = [ { name = "The NOMAD Authors" }, ] -description = "Extend NeXus for materials science experiment and serve as a NOMAD parser implementation for NeXus." +description = "Extend NeXus for experiments and characterization in Materials Science and Materials Engineering and serve as a NOMAD parser implementation for NeXus." readme = "README.md" -license = { file = "LICENSE.txt" } -requires-python = ">=3.8,<3.11" +license = { file = "LICENSE" } +requires-python = ">=3.8,!=3.12" classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", ] @@ -29,20 +31,23 @@ dependencies = [ "ase>=3.19.0", "flatdict>=4.0.1", "hyperspy>=1.7.5", - "ifes_apt_tc_data_modeling>=0.0.9", + "ifes_apt_tc_data_modeling>=0.1", "gitpython>=3.1.24", "pytz>=2021.1", "kikuchipy>=0.9.0", "pyxem>=0.15.1", "zipfile37==0.1.3", - "nionswift==0.16.8", + "nionswift>=0.16.8", "tzlocal<=4.3", "scipy>=1.7.1", "lark>=1.1.5", "requests", "requests_cache", + "mergedeep" ] +[options] +install_requires = "importlib-metadata ; python_version < '3.10'" [project.urls] "Homepage" = "https://github.com/FAIRmat-NFDI/pynxtools" "Bug Tracker" = "https://github.com/FAIRmat-NFDI/pynxtools/issues" @@ -66,6 +71,7 @@ dev = [ read_nexus = "pynxtools.nexus.nexus:main" dataconverter = "pynxtools.dataconverter.convert:convert_cli" nyaml2nxdl = "pynxtools.nyaml2nxdl.nyaml2nxdl:launch_tool" +generate_eln = "pynxtools.eln_mapper.eln_mapper:get_eln" [tool.setuptools.package-data] pynxtools = ["definitions/**/*.xml", "definitions/**/*.xsd"] @@ -77,5 +83,5 @@ pynxtools = ["definitions/**/*.xml", "definitions/**/*.xsd"] exclude = ["pynxtools/definitions*"] [tool.setuptools_scm] -version_scheme = "guess-next-dev" +version_scheme = "no-guess-dev" local_scheme = "node-and-date" diff --git a/tests/data/dataconverter/NXtest.nxdl.xml b/tests/data/dataconverter/NXtest.nxdl.xml index a2cc553fa..f4aa0aab4 100644 --- a/tests/data/dataconverter/NXtest.nxdl.xml +++ b/tests/data/dataconverter/NXtest.nxdl.xml @@ -60,6 +60,9 @@ A dummy entry to test optional parent check for required child. + + This is a required group in an optional group. 
+ diff --git a/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml b/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml index a750d3a80..ba4a00b3b 100644 --- a/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml +++ b/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml @@ -18,8 +18,7 @@ definitions: # This would be useful to make the default values set in `template` fixed. # Leave the hide key even if you want to pass an empty list like in this example. eln: - # hide: ['nxdl', 'reader'] - hide: [] + hide: ['nxdl', 'reader'] sub_sections: entry: section: @@ -29,24 +28,6 @@ definitions: eln: overview: true quantities: - attr_version: - type: - type_kind: Enum - type_data: - - 'nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696' - description: Hashvalue of the NeXus application definition file - m_annotations: - eln: - component: RadioEnumEditQuantity - definition: - type: - type_kind: Enum - type_data: - - NXapm - description: NeXus NXDL schema to which this file conforms - m_annotations: - eln: - component: RadioEnumEditQuantity experiment_identifier: type: str description: GUID of the experiment @@ -58,40 +39,31 @@ definitions: description: Free text details about the experiment m_annotations: eln: - component: StringEditQuantity + component: RichTextEditQuantity start_time: type: Datetime - description: ISO 8601 time code with local time zone offset to UTC when the experiment started. + description: | + ISO 8601 time code with local time zone offset + to UTC when the experiment started. m_annotations: eln: component: DateTimeEditQuantity end_time: type: Datetime - description: ISO 8601 time code with local time zone offset to UTC when the experiment ended. + description: | + ISO 8601 time code with local time zone offset + to UTC when the experiment ended. m_annotations: eln: component: DateTimeEditQuantity - program: - type: str - description: Name of the program used to create this file. - m_annotations: - eln: - component: StringEditQuantity - program__attr_version: - type: str - description: Version plus build number, commit hash, or description of the program to support reproducibility. - m_annotations: - eln: - component: StringEditQuantity run_number: type: str - description: Identifier in the instrument control software given for this experiment. + description: | + Identifier in the instrument control software + given for this experiment. m_annotations: eln: component: StringEditQuantity - # experiment_documentation(NXnote): - # thumbnail(NXnote): - # attr_type: operation_mode: type: type_kind: Enum @@ -124,6 +96,173 @@ definitions: # m_annotations: # eln: # component: FileEditQuantity + + sample: + section: + description: | + Description of the sample from which the specimen was prepared or + site-specifically cut out using e.g. a focused-ion beam instrument. + m_annotations: + eln: + quantities: + composition: + type: str + shape: ['*'] + description: | + Chemical composition of the sample. The composition from e.g. + a composition table can be added as individual strings. + One string for each element with statements separated via a + single space. The string is expected to have the following format: + Symbol value unit +- stdev + + An example: B 1. +- 0.2, means + composition of boron 1. at.-% +- 0.2 at.%. 
+ If a string contains only a symbol this is interpreted + that the symbol specifies the matrix or remainder element + for the composition table. + + If unit is omitted or named % this is interpreted as at.-%. + Unit can be at% or wt% but all strings have to use either atom + or weight percent but no mixtures. + No unit for stdev should be repeated as it has to be the + same unit as is used for the composition value. + m_annotations: + eln: + component: StringEditQuantity + grain_diameter: + type: np.float64 + unit: micrometer + description: | + Qualitative information about the grain size, here specifically + described as the equivalent spherical diameter of an assumed + average grain size for the crystal ensemble. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: micrometer + grain_diameter_error: + type: np.float64 + unit: micrometer + description: | + Magnitude of the standard deviation to the grain_diameter. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: micrometer + heat_treatment_temperature: + type: np.float64 + unit: kelvin + description: | + The temperature of the last heat treatment step before quenching. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: kelvin + heat_treatment_temperature_error: + type: np.float64 + unit: kelvin + description: | + Magnitude of the standard deviation of the heat_treatment_temperature. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: kelvin + heat_treatment_quenching_rate: + type: np.float64 + unit: kelvin/second + description: | + Rate of the last quenching step. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: kelvin/second + heat_treatment_quenching_rate_error: + type: np.float64 + unit: K/s + description: | + Magnitude of the standard deviation of the heat_treatment_quenching_rate. + m_annotations: + eln: + component: NumberEditQuantity + minValue: 0.0 + defaultDisplayUnit: K/s + specimen: + section: + description: | + Details about the specimen and its immediate environment. + m_annotations: + eln: + quantities: + name: + type: str + description: | + GUID which distinguishes the specimen from all others and especially + the predecessor/origin from where the specimen was cut. + In cases where the specimen was e.g. site-specifically cut from + samples or in cases of an instrument session during which multiple + specimens are loaded, the name has to be descriptive enough to + resolve which specimen on e.g. the microtip array was taken. + This field must not be used for an alias of the specimen. + Instead, use short_title. + m_annotations: + eln: + component: StringEditQuantity + # sample_history: + # type: str + # description: | + # Reference to the location of or a GUID providing as many details + # as possible of the material, its microstructure, and its + # thermo-chemo-mechanical processing/preparation history. + # m_annotations: + # eln: + # component: StringEditQuantity + preparation_date: + type: Datetime + description: | + ISO 8601 time code with local time zone offset to UTC + when the measured specimen surface was prepared last time. + m_annotations: + eln: + component: DateTimeEditQuantity + is_polycrystalline: + type: bool + description: | + Is the specimen, i.e. the tip, polycrystalline, i.e. does + it includes a grain or phase boundary? 
+ m_annotations: + eln: + component: BoolEditQuantity + alias: + type: str + description: | + Possibility to give an abbreviation of the specimen name field. + m_annotations: + eln: + component: StringEditQuantity + # atom_types should be a list of strings + # atom_types: + # type: str + # shape: ['*'] + # description: | + # Use Hill's system for listing elements of the periodic table which + # are inside or attached to the surface of the specimen and thus + # relevant from a scientific point of view. + # m_annotations: + # eln: + # component: StringEditQuantity + description: + type: str + description: | + Discouraged free text field to be used in the case when properly + designed records for the sample_history are not available. + m_annotations: + eln: + component: RichTextEditQuantity user: repeats: true section: @@ -193,102 +332,6 @@ definitions: m_annotations: eln: component: StringEditQuantity - specimen: - section: - description: | - Details about the specimen and its immediate environment. - m_annotations: - eln: - quantities: - name: - type: str - description: | - GUID which distinguishes the specimen from all others and especially - the predecessor/origin from where the specimen was cut. - In cases where the specimen was e.g. site-specifically cut from - samples or in cases of an instrument session during which multiple - specimens are loaded, the name has to be descriptive enough to - resolve which specimen on e.g. the microtip array was taken. - This field must not be used for an alias of the specimen. - Instead, use short_title. - m_annotations: - eln: - component: StringEditQuantity - sample_history: - type: str - description: | - Reference to the location of or a GUID providing as many details - as possible of the material, its microstructure, and its - thermo-chemo-mechanical processing/preparation history. - m_annotations: - eln: - component: StringEditQuantity - preparation_date: - type: Datetime - description: | - ISO 8601 time code with local time zone offset to UTC information when - the measured specimen surface was actively prepared. - m_annotations: - eln: - component: DateTimeEditQuantity - short_title: - type: str - description: Possibility to give an abbreviation of the specimen name field. - m_annotations: - eln: - component: StringEditQuantity - # atom_types should be a list of strings - atom_types: - type: str - shape: ['*'] - description: | - Use Hill's system for listing elements of the periodic table which - are inside or attached to the surface of the specimen and thus - relevant from a scientific point of view. - m_annotations: - eln: - component: StringEditQuantity - description: - type: str - description: | - Discouraged free text field to be used in the case when properly - designed records for the sample_history are not available. - m_annotations: - eln: - component: StringEditQuantity - # composition_element_symbol: - # type: str - # shape: ['*'] - # description: | - # Chemical symbol. - # m_annotations: - # eln: - # component: StringEditQuantity - # composition_mass_fraction: - # type: np.float64 - # shape: ['*'] - # description: | - # Composition but this can be atomic or mass fraction. - # Best is you specify which you want. Under the hood oasis uses pint - # /nomad/nomad/units is the place where you can predefine exotic - # constants and units for a local oasis instance - # m_annotations: - # eln: - # component: NumberEditQuantity - # minValue: 0. - # maxValue: 1. 
- # composition_mass_fraction_error: - # type: np.float64 - # shape: ['*'] - # description: | - # Composition but this can be atomic or mass fraction. - # Also here best to be specific. If people write at.-% but mean wt.-% you - # cannot guard yourself against this - # m_annotations: - # eln: - # component: NumberEditQuantity - # minValue: 0. - # maxValue: 1. atom_probe: section: description: | @@ -302,6 +345,7 @@ definitions: type_data: - success - failure + - unknown description: | A statement whether the measurement was successful or failed prematurely. @@ -314,6 +358,14 @@ definitions: m_annotations: eln: component: StringEditQuantity + location: + type: str + description: | + Location of the lab or place where the instrument is installed. + Using GEOREF is preferred. + m_annotations: + eln: + component: StringEditQuantity # (NXfabrication): flight_path_length: type: np.float64 @@ -327,6 +379,18 @@ definitions: defaultDisplayUnit: meter minValue: 0.0 maxValue: 10.0 + field_of_view: + type: np.float64 + unit: nanometer + description: | + The nominal diameter of the specimen ROI which is measured in the + experiment. Physically, the specimen cannot be measured completely + because ions may launch but not become detected or hit elsewhere. + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: nanometer + minValue: 0.0 fabrication_vendor: type: str description: Name of the manufacturer/company, i.e. AMETEK/Cameca. @@ -415,7 +479,7 @@ definitions: component: NumberEditQuantity defaultDisplayUnit: kelvin minValue: 0.0 - maxValue: 273.15 + maxValue: 300.0 analysis_chamber_pressure: type: np.float64 unit: torr @@ -485,8 +549,8 @@ definitions: type_kind: Enum type_data: - laser - - high_voltage - - laser_and_high_voltage + - voltage + - laser_and_voltage description: | Which pulsing mode was used? m_annotations: @@ -510,41 +574,53 @@ definitions: component: NumberEditQuantity minValue: 0.0 maxValue: 1.0 - laser_source_name: - type: str - description: Given name/alias. - m_annotations: - eln: - component: StringEditQuantity - laser_source_wavelength: - type: np.float64 - unit: meter - description: Nominal wavelength of the laser radiation. - m_annotations: - eln: - component: NumberEditQuantity - defaultDisplayUnit: nanometer - minValue: 0.0 - laser_source_power: - type: np.float64 - unit: watt - description: | - Nominal power of the laser source while - illuminating the specimen. - m_annotations: - eln: - component: NumberEditQuantity - defaultDisplayUnit: nanowatt - minValue: 0.0 - laser_source_pulse_energy: - type: np.float64 - unit: joule - description: Average energy of the laser at peak of each pulse. - m_annotations: - eln: - component: NumberEditQuantity - defaultDisplayUnit: picojoule - minValue: 0.0 + # LEAP 6000 instrument has up to two lasers + sub_sections: + laser_source: + repeats: True + section: + description: | + Details about each laser pulsing unit. + LEAP6000 instruments can use up to two lasers. + m_annotations: + eln: + quantities: + name: + type: str + description: Given name/alias. + m_annotations: + eln: + component: StringEditQuantity + wavelength: + type: np.float64 + unit: nanometer + description: Nominal wavelength of the laser radiation. + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: nanometer + minValue: 0.0 + power: + type: np.float64 + unit: nanowatt + description: | + Nominal power of the laser source while + illuminating the specimen. 
+ m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: nanowatt + minValue: 0.0 + pulse_energy: + type: np.float64 + unit: picojoule + description: | + Average energy of the laser at peak of each pulse. + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: picojoule + minValue: 0.0 # control_software: # section: # description: Which control software was used e.g. IVAS/APSuite diff --git a/tests/data/dataconverter/readers/ellips/eln_data.yaml b/tests/data/dataconverter/readers/ellips/eln_data.yaml index 70b708ef3..785e8e1e6 100644 --- a/tests/data/dataconverter/readers/ellips/eln_data.yaml +++ b/tests/data/dataconverter/readers/ellips/eln_data.yaml @@ -58,9 +58,6 @@ colnames: - Delta - err.Psi - err.Delta -definition: NXellipsometry -definition/@url: https://github.com/FAIRmat-NFDI/nexus_definitions/blob/fairmat/contributed_definitions/NXellipsometry.nxdl.xml -definition/@version: 0.0.2 derived_parameter_type: depolarization experiment_description: RC2 scan on 2nm SiO2 on Si in air experiment_identifier: exp-ID diff --git a/tests/data/dataconverter/readers/json_map/data.json b/tests/data/dataconverter/readers/json_map/data.json index 28fb71b48..ae0cf6c88 100644 --- a/tests/data/dataconverter/readers/json_map/data.json +++ b/tests/data/dataconverter/readers/json_map/data.json @@ -17,5 +17,6 @@ "type": "2nd type", "date_value": "2022-01-22T12:14:12.05018+00:00", "required_child": 1, - "optional_child": 1 + "optional_child": 1, + "random_data": [0, 1] } \ No newline at end of file diff --git a/tests/data/dataconverter/readers/json_map/data.mapping.json b/tests/data/dataconverter/readers/json_map/data.mapping.json index 5fc7b95c5..055b0977e 100644 --- a/tests/data/dataconverter/readers/json_map/data.mapping.json +++ b/tests/data/dataconverter/readers/json_map/data.mapping.json @@ -18,5 +18,6 @@ "/ENTRY[entry]/optional_parent/required_child": "/required_child", "/ENTRY[entry]/program_name": "Example for listing exact data in the map file: Nexus Parser", "/ENTRY[entry]/required_group/description": "An example description", - "/ENTRY[entry]/required_group2/description": "An example description" + "/ENTRY[entry]/required_group2/description": "An example description", + "/ENTRY[entry]/optional_parent/req_group_in_opt_group/DATA[data]": "/random_data" } \ No newline at end of file diff --git a/tests/data/dataconverter/readers/mpes/Ref_nexus_mpes.log b/tests/data/dataconverter/readers/mpes/Ref_nexus_mpes.log index 35c7fb42f..d4a58e2ee 100644 --- a/tests/data/dataconverter/readers/mpes/Ref_nexus_mpes.log +++ b/tests/data/dataconverter/readers/mpes/Ref_nexus_mpes.log @@ -8,12 +8,13 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:): DEBUG - - (**required**) :ref:`NXentry` describes the measurement. - - The top-level NeXus group which contains all the data and associated - information that comprise a single measurement. - It is mandatory that there is at least one - group of this type in the NeXus file. + (**required**) :ref:`NXentry` describes the measurement. + + The top-level NeXus group which contains all the data and associated + information that comprise a single measurement. + It is mandatory that there is at least one + group of this type in the NeXus file. 
+ DEBUG - ===== ATTRS (//entry@NX_class) DEBUG - value: NXentry DEBUG - classpath: ['NXentry'] @@ -32,23 +33,23 @@ DEBUG - NXmpes.nxdl.xml:/ENTRY@default - [NX_CHAR] DEBUG - NXentry.nxdl.xml:@default - [NX_CHAR] DEBUG - documentation (NXentry.nxdl.xml:/default): DEBUG - - .. index:: find the default plottable data - .. index:: plotting - .. index:: default attribute value - - Declares which :ref:`NXdata` group contains the data - to be shown by default. - It is used to resolve ambiguity when - one :ref:`NXdata` group exists. - The value :ref:`names ` a child group. If that group - itself has a ``default`` attribute, continue this chain until an - :ref:`NXdata` group is reached. - - For more information about how NeXus identifies the default - plottable data, see the - :ref:`Find Plottable Data, v3 ` - section. - + .. index:: find the default plottable data + .. index:: plotting + .. index:: default attribute value + + Declares which :ref:`NXdata` group contains the data + to be shown by default. + It is used to resolve ambiguity when + one :ref:`NXdata` group exists. + The value :ref:`names ` a child group. If that group + itself has a ``default`` attribute, continue this chain until an + :ref:`NXdata` group is reached. + + For more information about how NeXus identifies the default + plottable data, see the + :ref:`Find Plottable Data, v3 ` + section. + DEBUG - ===== FIELD (//entry/collection_time): DEBUG - value: 2317.343 DEBUG - classpath: ['NXentry', 'NX_FLOAT'] @@ -57,9 +58,9 @@ NXentry.nxdl.xml:/collection_time DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/collection_time): DEBUG - - Time transpired actually collecting data i.e. taking out time when collection was - suspended due to e.g. temperature out of range - + Time transpired actually collecting data i.e. taking out time when collection was + suspended due to e.g. temperature out of range + DEBUG - ===== ATTRS (//entry/collection_time@units) DEBUG - value: s DEBUG - classpath: ['NXentry', 'NX_FLOAT'] @@ -77,34 +78,33 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/DATA): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/DATA): DEBUG - - The data group - - .. note:: Before the NIAC2016 meeting [#]_, at least one - :ref:`NXdata` group was required in each :ref:`NXentry` group. - At the NIAC2016 meeting, it was decided to make :ref:`NXdata` - an optional group in :ref:`NXentry` groups for data files that - do not use an application definition. - It is recommended strongly that all NeXus data files provide - a NXdata group. - It is permissable to omit the NXdata group only when - defining the default plot is not practical or possible - from the available data. - - For example, neutron event data may not have anything that - makes a useful plot without extensive processing. - - Certain application definitions override this decision and - require an :ref:`NXdata` group - in the :ref:`NXentry` group. The ``minOccurs=0`` attribute - in the application definition will indicate the - :ref:`NXdata` group - is optional, otherwise, it is required. - - .. [#] NIAC2016: - https://www.nexusformat.org/NIAC2016.html, - https://github.com/nexusformat/NIAC/issues/16 - - + The data group + + .. note:: Before the NIAC2016 meeting [#]_, at least one + :ref:`NXdata` group was required in each :ref:`NXentry` group. + At the NIAC2016 meeting, it was decided to make :ref:`NXdata` + an optional group in :ref:`NXentry` groups for data files that + do not use an application definition. 
+ It is recommended strongly that all NeXus data files provide + a NXdata group. + It is permissable to omit the NXdata group only when + defining the default plot is not practical or possible + from the available data. + + For example, neutron event data may not have anything that + makes a useful plot without extensive processing. + + Certain application definitions override this decision and + require an :ref:`NXdata` group + in the :ref:`NXentry` group. The ``minOccurs=0`` attribute + in the application definition will indicate the + :ref:`NXdata` group + is optional, otherwise, it is required. + + .. [#] NIAC2016: + https://www.nexusformat.org/NIAC2016.html, + https://github.com/nexusformat/NIAC/issues/16 + DEBUG - documentation (NXdata.nxdl.xml:): DEBUG - :ref:`NXdata` describes the plottable data and related dimension scales. @@ -466,21 +466,21 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/definition): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/definition): DEBUG - - (alternate use: see same field in :ref:`NXsubentry` for preferred) - - Official NeXus NXDL schema to which this entry conforms which must be - the name of the NXDL file (case sensitive without the file extension) - that the NXDL schema is defined in. - - For example the ``definition`` field for a file that conformed to the - *NXarpes.nxdl.xml* definition must contain the string **NXarpes**. - - This field is provided so that :ref:`NXentry` can be the overlay position - in a NeXus data file for an application definition and its - set of groups, fields, and attributes. - - *It is advised* to use :ref:`NXsubentry`, instead, as the overlay position. - + (alternate use: see same field in :ref:`NXsubentry` for preferred) + + Official NeXus NXDL schema to which this entry conforms which must be + the name of the NXDL file (case sensitive without the file extension) + that the NXDL schema is defined in. + + For example the ``definition`` field for a file that conformed to the + *NXarpes.nxdl.xml* definition must contain the string **NXarpes**. + + This field is provided so that :ref:`NXentry` can be the overlay position + in a NeXus data file for an application definition and its + set of groups, fields, and attributes. + + *It is advised* to use :ref:`NXsubentry`, instead, as the overlay position. 
+ DEBUG - ===== ATTRS (//entry/definition@version) DEBUG - value: None DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -493,7 +493,9 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/definition/version): DEBUG - DEBUG - NXentry.nxdl.xml:/definition@version - [NX_CHAR] DEBUG - documentation (NXentry.nxdl.xml:/definition/version): -DEBUG - NXDL version number +DEBUG - + NXDL version number + DEBUG - ===== FIELD (//entry/duration): DEBUG - value: 2317 DEBUG - classpath: ['NXentry', 'NX_INT'] @@ -501,7 +503,9 @@ DEBUG - classes: NXentry.nxdl.xml:/duration DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/duration): -DEBUG - Duration of measurement +DEBUG - + Duration of measurement + DEBUG - ===== ATTRS (//entry/duration@units) DEBUG - value: s DEBUG - classpath: ['NXentry', 'NX_INT'] @@ -515,7 +519,9 @@ DEBUG - classes: NXentry.nxdl.xml:/end_time DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/end_time): -DEBUG - Ending time of measurement +DEBUG - + Ending time of measurement + DEBUG - ===== FIELD (//entry/entry_identifier): DEBUG - value: b'2019/2019_05/2019_05_23/Scan005' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -523,22 +529,39 @@ DEBUG - classes: NXentry.nxdl.xml:/entry_identifier DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/entry_identifier): -DEBUG - unique identifier for the measurement, defined by the facility. +DEBUG - + unique identifier for the measurement, defined by the facility. + DEBUG - ===== FIELD (//entry/experiment_facility): DEBUG - value: b'Time Resolved ARPES' -DEBUG - classpath: ['NXentry'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NX_CHAR'] +DEBUG - classes: +NXentry.nxdl.xml:/experiment_facility +DEBUG - <> +DEBUG - documentation (NXentry.nxdl.xml:/experiment_facility): DEBUG - + Name of the experimental facility + DEBUG - ===== FIELD (//entry/experiment_institution): DEBUG - value: b'Fritz Haber Institute - Max Planck Society' -DEBUG - classpath: ['NXentry'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NX_CHAR'] +DEBUG - classes: +NXentry.nxdl.xml:/experiment_institution +DEBUG - <> +DEBUG - documentation (NXentry.nxdl.xml:/experiment_institution): DEBUG - + Name of the institution hosting the facility + DEBUG - ===== FIELD (//entry/experiment_laboratory): DEBUG - value: b'Clean Room 4' -DEBUG - classpath: ['NXentry'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NX_CHAR'] +DEBUG - classes: +NXentry.nxdl.xml:/experiment_laboratory +DEBUG - <> +DEBUG - documentation (NXentry.nxdl.xml:/experiment_laboratory): DEBUG - + Name of the laboratory or beamline + DEBUG - ===== GROUP (//entry/instrument [NXmpes::/NXentry/NXinstrument]): DEBUG - classpath: ['NXentry', 'NXinstrument'] DEBUG - classes: @@ -552,15 +575,15 @@ DEBUG - documentation (NXentry.nxdl.xml:/INSTRUMENT): DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:): DEBUG - - Collection of the components of the instrument or beamline. - - Template of instrument descriptions comprising various beamline components. - Each component will also be a NeXus group defined by its distance from the - sample. Negative distances represent beamline components that are before the - sample while positive distances represent components that are after the sample. - This device allows the unique identification of beamline components in a way - that is valid for both reactor and pulsed instrumentation. - + Collection of the components of the instrument or beamline. + + Template of instrument descriptions comprising various beamline components. 
+ Each component will also be a NeXus group defined by its distance from the + sample. Negative distances represent beamline components that are before the + sample while positive distances represent components that are after the sample. + This device allows the unique identification of beamline components in a way + that is valid for both reactor and pulsed instrumentation. + DEBUG - ===== ATTRS (//entry/instrument@NX_class) DEBUG - value: NXinstrument DEBUG - classpath: ['NXentry', 'NXinstrument'] @@ -583,22 +606,22 @@ DEBUG - documentation (NXinstrument.nxdl.xml:/BEAM): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:): DEBUG - - Properties of the neutron or X-ray beam at a given location. - - This group is intended to be referenced - by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is - especially valuable in storing the results of instrument simulations in which it is useful - to specify the beam profile, time distribution etc. at each beamline component. Otherwise, - its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron - scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is - considered as a beamline component and this group may be defined as a subgroup directly inside - :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an - :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). - - Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. - To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred - by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. - + Properties of the neutron or X-ray beam at a given location. + + This group is intended to be referenced + by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is + especially valuable in storing the results of instrument simulations in which it is useful + to specify the beam profile, time distribution etc. at each beamline component. Otherwise, + its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron + scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is + considered as a beamline component and this group may be defined as a subgroup directly inside + :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an + :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). + + Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. + To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred + by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. + DEBUG - ===== ATTRS (//entry/instrument/beam@NX_class) DEBUG - value: NXbeam DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -632,8 +655,8 @@ NXbeam.nxdl.xml:/extent DEBUG - <> DEBUG - documentation (NXbeam.nxdl.xml:/extent): DEBUG - - Size of the beam entering this component. 
Note this represents - a rectangular beam aperture, and values represent FWHM + Size of the beam entering this component. Note this represents + a rectangular beam aperture, and values represent FWHM DEBUG - ===== ATTRS (//entry/instrument/beam/extent@units) DEBUG - value: µm @@ -651,7 +674,24 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:/incident_energy): -DEBUG - Energy carried by each particle of the beam on entering the beamline component +DEBUG - + Energy carried by each particle of the beam on entering the beamline component. + + In the case of a monochromatic beam this is the scalar energy. + Several other use cases are permitted, depending on the + presence of other incident_energy_X fields. + + * In the case of a polychromatic beam this is an array of length m of energies, with the relative weights in incident_energy_weights. + * In the case of a monochromatic beam that varies shot-to-shot, this is an array of energies, one for each recorded shot. + Here, incident_energy_weights and incident_energy_spread are not set. + * In the case of a polychromatic beam that varies shot-to-shot, + this is an array of length m with the relative weights in incident_energy_weights as a 2D array. + * In the case of a polychromatic beam that varies shot-to-shot and where the channels also vary, + this is a 2D array of dimensions nP by m (slow to fast) with the relative weights in incident_energy_weights as a 2D array. + + Note, variants are a good way to represent several of these use cases in a single dataset, + e.g. if a calibrated, single-value energy value is available along with the original spectrum from which it was calibrated. + DEBUG - ===== ATTRS (//entry/instrument/beam/incident_energy@units) DEBUG - value: eV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] @@ -665,15 +705,25 @@ DEBUG - value: 0.11 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread +NXbeam.nxdl.xml:/incident_energy_spread DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread): DEBUG - +DEBUG - documentation (NXbeam.nxdl.xml:/incident_energy_spread): +DEBUG - + The energy spread FWHM for the corresponding energy(ies) in incident_energy. In the case of shot-to-shot variation in + the energy spread, this is a 2D array of dimension nP by m + (slow to fast) of the spreads of the corresponding + wavelength in incident_wavelength. + DEBUG - ===== ATTRS (//entry/instrument/beam/incident_energy_spread@units) DEBUG - value: eV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread +NXbeam.nxdl.xml:/incident_energy_spread DEBUG - NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread@units [NX_ENERGY] +DEBUG - NXbeam.nxdl.xml:/incident_energy_spread@units [NX_ENERGY] DEBUG - ===== FIELD (//entry/instrument/beam/incident_polarization): DEBUG - value: [1. 1. 0. 0.] 
DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] @@ -684,7 +734,10 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_polarization): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:/incident_polarization): -DEBUG - Polarization vector on entering beamline component +DEBUG - + Incident polarization as a Stokes vector + on entering beamline component + DEBUG - ===== ATTRS (//entry/instrument/beam/incident_polarization@units) DEBUG - value: V^2/mm^2 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] @@ -695,14 +748,20 @@ DEBUG - NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_polarization@units [NX_A DEBUG - NXbeam.nxdl.xml:/incident_polarization@units [NX_ANY] DEBUG - ===== FIELD (//entry/instrument/beam/pulse_duration): DEBUG - value: 20.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_duration): DEBUG - + FWHM duration of the pulses at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam/pulse_duration@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - NXbeam.nxdl.xml:/pulse_duration@units [NX_TIME] DEBUG - ===== GROUP (//entry/instrument/beam_pump [NXmpes::/NXentry/NXinstrument/NXbeam]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] DEBUG - classes: @@ -716,22 +775,22 @@ DEBUG - documentation (NXinstrument.nxdl.xml:/BEAM): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:): DEBUG - - Properties of the neutron or X-ray beam at a given location. - - This group is intended to be referenced - by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is - especially valuable in storing the results of instrument simulations in which it is useful - to specify the beam profile, time distribution etc. at each beamline component. Otherwise, - its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron - scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is - considered as a beamline component and this group may be defined as a subgroup directly inside - :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an - :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). - - Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. - To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred - by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. - + Properties of the neutron or X-ray beam at a given location. + + This group is intended to be referenced + by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is + especially valuable in storing the results of instrument simulations in which it is useful + to specify the beam profile, time distribution etc. at each beamline component. 
Otherwise, + its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron + scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is + considered as a beamline component and this group may be defined as a subgroup directly inside + :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an + :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). + + Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. + To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred + by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. + DEBUG - ===== ATTRS (//entry/instrument/beam_pump@NX_class) DEBUG - value: NXbeam DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -743,14 +802,20 @@ DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/beam_pump/average_power): DEBUG - value: 444.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/average_power +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/average_power): DEBUG - + Average power at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/average_power@units) DEBUG - value: mW -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/average_power +DEBUG - NXbeam.nxdl.xml:/average_power@units [NX_POWER] DEBUG - ===== FIELD (//entry/instrument/beam_pump/distance): DEBUG - value: 0.0 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] @@ -775,8 +840,8 @@ NXbeam.nxdl.xml:/extent DEBUG - <> DEBUG - documentation (NXbeam.nxdl.xml:/extent): DEBUG - - Size of the beam entering this component. Note this represents - a rectangular beam aperture, and values represent FWHM + Size of the beam entering this component. 
Note this represents + a rectangular beam aperture, and values represent FWHM DEBUG - ===== ATTRS (//entry/instrument/beam_pump/extent@units) DEBUG - value: µm @@ -786,14 +851,20 @@ NXbeam.nxdl.xml:/extent DEBUG - NXbeam.nxdl.xml:/extent@units [NX_LENGTH] DEBUG - ===== FIELD (//entry/instrument/beam_pump/fluence): DEBUG - value: 1.3 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/fluence +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/fluence): DEBUG - + Incident fluence at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/fluence@units) DEBUG - value: mJ/cm^2 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/fluence +DEBUG - NXbeam.nxdl.xml:/fluence@units [NX_ANY] DEBUG - ===== FIELD (//entry/instrument/beam_pump/incident_energy): DEBUG - value: 1.2 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] @@ -804,7 +875,24 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:/incident_energy): -DEBUG - Energy carried by each particle of the beam on entering the beamline component +DEBUG - + Energy carried by each particle of the beam on entering the beamline component. + + In the case of a monochromatic beam this is the scalar energy. + Several other use cases are permitted, depending on the + presence of other incident_energy_X fields. + + * In the case of a polychromatic beam this is an array of length m of energies, with the relative weights in incident_energy_weights. + * In the case of a monochromatic beam that varies shot-to-shot, this is an array of energies, one for each recorded shot. + Here, incident_energy_weights and incident_energy_spread are not set. + * In the case of a polychromatic beam that varies shot-to-shot, + this is an array of length m with the relative weights in incident_energy_weights as a 2D array. + * In the case of a polychromatic beam that varies shot-to-shot and where the channels also vary, + this is a 2D array of dimensions nP by m (slow to fast) with the relative weights in incident_energy_weights as a 2D array. + + Note, variants are a good way to represent several of these use cases in a single dataset, + e.g. if a calibrated, single-value energy value is available along with the original spectrum from which it was calibrated. + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/incident_energy@units) DEBUG - value: eV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] @@ -818,15 +906,25 @@ DEBUG - value: 0.05 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread +NXbeam.nxdl.xml:/incident_energy_spread DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread): DEBUG - +DEBUG - documentation (NXbeam.nxdl.xml:/incident_energy_spread): +DEBUG - + The energy spread FWHM for the corresponding energy(ies) in incident_energy. In the case of shot-to-shot variation in + the energy spread, this is a 2D array of dimension nP by m + (slow to fast) of the spreads of the corresponding + wavelength in incident_wavelength. 
+ DEBUG - ===== ATTRS (//entry/instrument/beam_pump/incident_energy_spread@units) DEBUG - value: eV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread +NXbeam.nxdl.xml:/incident_energy_spread DEBUG - NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_energy_spread@units [NX_ENERGY] +DEBUG - NXbeam.nxdl.xml:/incident_energy_spread@units [NX_ENERGY] DEBUG - ===== FIELD (//entry/instrument/beam_pump/incident_polarization): DEBUG - value: [1 1 0 0] DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] @@ -837,7 +935,10 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/BEAM/incident_polarization): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:/incident_polarization): -DEBUG - Polarization vector on entering beamline component +DEBUG - + Incident polarization as a Stokes vector + on entering beamline component + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/incident_polarization@units) DEBUG - value: V^2/mm^2 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_NUMBER'] @@ -854,38 +955,38 @@ NXbeam.nxdl.xml:/incident_wavelength DEBUG - <> DEBUG - documentation (NXbeam.nxdl.xml:/incident_wavelength): DEBUG - - In the case of a monochromatic beam this is the scalar - wavelength. - - Several other use cases are permitted, depending on the - presence or absence of other incident_wavelength_X - fields. - - In the case of a polychromatic beam this is an array of - length **m** of wavelengths, with the relative weights - in ``incident_wavelength_weights``. - - In the case of a monochromatic beam that varies shot- - to-shot, this is an array of wavelengths, one for each - recorded shot. Here, ``incident_wavelength_weights`` and - incident_wavelength_spread are not set. - - In the case of a polychromatic beam that varies shot-to- - shot, this is an array of length **m** with the relative - weights in ``incident_wavelength_weights`` as a 2D array. - - In the case of a polychromatic beam that varies shot-to- - shot and where the channels also vary, this is a 2D array - of dimensions **nP** by **m** (slow to fast) with the - relative weights in ``incident_wavelength_weights`` as a 2D - array. - - Note, :ref:`variants ` are a good way - to represent several of these use cases in a single dataset, - e.g. if a calibrated, single-value wavelength value is - available along with the original spectrum from which it - was calibrated. - Wavelength on entering beamline component + In the case of a monochromatic beam this is the scalar + wavelength. + + Several other use cases are permitted, depending on the + presence or absence of other incident_wavelength_X + fields. + + In the case of a polychromatic beam this is an array of + length **m** of wavelengths, with the relative weights + in ``incident_wavelength_weights``. + + In the case of a monochromatic beam that varies shot- + to-shot, this is an array of wavelengths, one for each + recorded shot. Here, ``incident_wavelength_weights`` and + incident_wavelength_spread are not set. + + In the case of a polychromatic beam that varies shot-to- + shot, this is an array of length **m** with the relative + weights in ``incident_wavelength_weights`` as a 2D array. + + In the case of a polychromatic beam that varies shot-to- + shot and where the channels also vary, this is a 2D array + of dimensions **nP** by **m** (slow to fast) with the + relative weights in ``incident_wavelength_weights`` as a 2D + array. 
+ + Note, :ref:`variants ` are a good way + to represent several of these use cases in a single dataset, + e.g. if a calibrated, single-value wavelength value is + available along with the original spectrum from which it + was calibrated. + Wavelength on entering beamline component DEBUG - ===== ATTRS (//entry/instrument/beam_pump/incident_wavelength@units) DEBUG - value: nm @@ -895,24 +996,36 @@ NXbeam.nxdl.xml:/incident_wavelength DEBUG - NXbeam.nxdl.xml:/incident_wavelength@units [NX_WAVELENGTH] DEBUG - ===== FIELD (//entry/instrument/beam_pump/pulse_duration): DEBUG - value: 140.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_duration): DEBUG - + FWHM duration of the pulses at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/pulse_duration@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - NXbeam.nxdl.xml:/pulse_duration@units [NX_TIME] DEBUG - ===== FIELD (//entry/instrument/beam_pump/pulse_energy): DEBUG - value: 0.889 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_energy +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_energy): DEBUG - + Energy of a single pulse at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump/pulse_energy@units) DEBUG - value: µJ -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_energy +DEBUG - NXbeam.nxdl.xml:/pulse_energy@units [NX_ENERGY] DEBUG - ===== GROUP (//entry/instrument/electronanalyser [NXmpes::/NXentry/NXinstrument/NXelectronanalyser]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser'] DEBUG - classes: @@ -948,8 +1061,8 @@ DEBUG - DEBUG - documentation (NXcollectioncolumn.nxdl.xml:): DEBUG - - Subclass of NXelectronanalyser to describe the electron collection column of a - photoelectron analyser. + Subclass of NXelectronanalyser to describe the electron collection + column of a photoelectron analyser. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn@NX_class) DEBUG - value: NXcollectioncolumn @@ -978,7 +1091,9 @@ DEBUG - or contrast aperture DEBUG - documentation (NXaperture.nxdl.xml:): -DEBUG - A beamline aperture. This group is deprecated, use NXslit instead. +DEBUG - + A beamline aperture. This group is deprecated, use NXslit instead. 
+ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture@NX_class) DEBUG - value: NXaperture DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] @@ -988,35 +1103,80 @@ NXcollectioncolumn.nxdl.xml:/APERTURE NXaperture.nxdl.xml: DEBUG - @NX_class [NX_CHAR] DEBUG - -DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/ca_m3 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/ca_m3]): -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA +DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/ca_m3 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/NXpositioner]): +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXaperture.nxdl.xml:/POSITIONER): +DEBUG - + Stores the raw positions of aperture motors. + +DEBUG - documentation (NXpositioner.nxdl.xml:): +DEBUG - + A generic positioner such as a motor or piezo-electric transducer. + +DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/ca_m3@NX_class) +DEBUG - value: NXpositioner +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/ca_m3/value): DEBUG - value: -11.49979350759219 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - <> +DEBUG - documentation (NXpositioner.nxdl.xml:/value): +DEBUG - best known value of positioner - need [n] as may be scanned DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/ca_m3/value@units) DEBUG - value: mm -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - NXpositioner.nxdl.xml:/value@units [NX_ANY] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/shape): DEBUG - value: b'open' -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_CHAR'] +DEBUG - classes: +NXaperture.nxdl.xml:/shape +DEBUG - <> +DEBUG - enumeration (NXaperture.nxdl.xml:/shape): +DEBUG - -> straight slit +DEBUG - -> curved slit +DEBUG - -> pinhole +DEBUG - -> circle +DEBUG - -> square +DEBUG - -> hexagon +DEBUG - -> octagon +DEBUG - -> bladed +DEBUG - -> open +DEBUG - -> grid +DEBUG - documentation (NXaperture.nxdl.xml:/shape): +DEBUG - + Shape of 
the aperture. + DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/size): DEBUG - value: nan -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_NUMBER'] +DEBUG - classes: +NXaperture.nxdl.xml:/size +DEBUG - <> +DEBUG - documentation (NXaperture.nxdl.xml:/size): DEBUG - + The relevant dimension for the aperture, i.e. slit width, pinhole and iris + diameter + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/contrast_aperture/size@units) DEBUG - value: µm -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_NUMBER'] +DEBUG - classes: +NXaperture.nxdl.xml:/size +DEBUG - NXaperture.nxdl.xml:/size@units [NX_LENGTH] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/extractor_current): DEBUG - value: -0.1309711275510204 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NX_FLOAT'] @@ -1068,7 +1228,9 @@ DEBUG - or contrast aperture DEBUG - documentation (NXaperture.nxdl.xml:): -DEBUG - A beamline aperture. This group is deprecated, use NXslit instead. +DEBUG - + A beamline aperture. This group is deprecated, use NXslit instead. + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture@NX_class) DEBUG - value: NXaperture DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] @@ -1078,49 +1240,116 @@ NXcollectioncolumn.nxdl.xml:/APERTURE NXaperture.nxdl.xml: DEBUG - @NX_class [NX_CHAR] DEBUG - -DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m1 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/fa_m1]): -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA +DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m1 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/NXpositioner]): +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXaperture.nxdl.xml:/POSITIONER): +DEBUG - + Stores the raw positions of aperture motors. + +DEBUG - documentation (NXpositioner.nxdl.xml:): +DEBUG - + A generic positioner such as a motor or piezo-electric transducer. 
+ +DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m1@NX_class) +DEBUG - value: NXpositioner +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m1/value): DEBUG - value: 3.749874153422982 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - <> +DEBUG - documentation (NXpositioner.nxdl.xml:/value): +DEBUG - best known value of positioner - need [n] as may be scanned DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m1/value@units) DEBUG - value: mm -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - NXpositioner.nxdl.xml:/value@units [NX_ANY] +DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m2 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/NXpositioner]): +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXaperture.nxdl.xml:/POSITIONER): DEBUG - -DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m2 [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXaperture/fa_m2]): -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA + Stores the raw positions of aperture motors. + +DEBUG - documentation (NXpositioner.nxdl.xml:): +DEBUG - + A generic positioner such as a motor or piezo-electric transducer. 
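As an aside to the log entries above, a minimal h5py sketch (not part of the patch; file name and values hypothetical, parent groups' NX_class attributes omitted for brevity) of the layout that makes an aperture motor such as ca_m3 resolve to NXaperture/NXpositioner:

    import h5py

    with h5py.File("positioner_example.nxs", "w") as nxs:
        aperture = nxs.create_group(
            "entry/instrument/electronanalyser/collectioncolumn/contrast_aperture"
        )
        aperture.attrs["NX_class"] = "NXaperture"
        motor = aperture.create_group("ca_m3")
        # the NX_class attribute, not the group name, maps ca_m3 onto the
        # POSITIONER child declared in NXaperture.nxdl.xml
        motor.attrs["NX_class"] = "NXpositioner"
        value = motor.create_dataset("value", data=-11.5)
        value.attrs["units"] = "mm"
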
+ +DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m2@NX_class) +DEBUG - value: NXpositioner +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner'] +DEBUG - classes: +NXaperture.nxdl.xml:/POSITIONER +NXpositioner.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m2/value): DEBUG - value: -5.200156936301793 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - <> +DEBUG - documentation (NXpositioner.nxdl.xml:/value): +DEBUG - best known value of positioner - need [n] as may be scanned DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/fa_m2/value@units) DEBUG - value: mm -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NXpositioner', 'NX_NUMBER'] +DEBUG - classes: +NXpositioner.nxdl.xml:/value +DEBUG - NXpositioner.nxdl.xml:/value@units [NX_ANY] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/shape): DEBUG - value: b'circle' -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_CHAR'] +DEBUG - classes: +NXaperture.nxdl.xml:/shape +DEBUG - <> +DEBUG - enumeration (NXaperture.nxdl.xml:/shape): +DEBUG - -> straight slit +DEBUG - -> curved slit +DEBUG - -> pinhole +DEBUG - -> circle +DEBUG - -> square +DEBUG - -> hexagon +DEBUG - -> octagon +DEBUG - -> bladed +DEBUG - -> open +DEBUG - -> grid +DEBUG - documentation (NXaperture.nxdl.xml:/shape): +DEBUG - + Shape of the aperture. + DEBUG - ===== FIELD (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/size): DEBUG - value: 200.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_NUMBER'] +DEBUG - classes: +NXaperture.nxdl.xml:/size +DEBUG - <> +DEBUG - documentation (NXaperture.nxdl.xml:/size): DEBUG - + The relevant dimension for the aperture, i.e. 
slit width, pinhole and iris + diameter + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/field_aperture/size@units) DEBUG - value: µm -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXaperture', 'NX_NUMBER'] +DEBUG - classes: +NXaperture.nxdl.xml:/size +DEBUG - NXaperture.nxdl.xml:/size@units [NX_LENGTH] DEBUG - ===== GROUP (//entry/instrument/electronanalyser/collectioncolumn/lens_A [NXmpes::/NXentry/NXinstrument/NXelectronanalyser/NXcollectioncolumn/NXlens_em]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXcollectioncolumn', 'NXlens_em'] DEBUG - classes: @@ -1133,14 +1362,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_A@NX_class) DEBUG - value: NXlens_em @@ -1169,8 +1398,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_A/voltage@units) DEBUG - value: V @@ -1190,14 +1421,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_B@NX_class) DEBUG - value: NXlens_em @@ -1226,8 +1457,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. 
DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_B/voltage@units) DEBUG - value: V @@ -1247,14 +1480,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_C@NX_class) DEBUG - value: NXlens_em @@ -1283,8 +1516,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_C/voltage@units) DEBUG - value: V @@ -1304,14 +1539,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_D@NX_class) DEBUG - value: NXlens_em @@ -1340,8 +1575,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_D/voltage@units) DEBUG - value: V @@ -1361,14 +1598,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). 
The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_E@NX_class) DEBUG - value: NXlens_em @@ -1397,8 +1634,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_E/voltage@units) DEBUG - value: V @@ -1418,14 +1657,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_F@NX_class) DEBUG - value: NXlens_em @@ -1454,8 +1693,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_F/voltage@units) DEBUG - value: V @@ -1475,14 +1716,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_Foc@NX_class) DEBUG - value: NXlens_em @@ -1511,8 +1752,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. 
DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_Foc/voltage@units) DEBUG - value: V @@ -1532,14 +1775,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_G@NX_class) DEBUG - value: NXlens_em @@ -1568,8 +1811,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_G/voltage@units) DEBUG - value: V @@ -1589,14 +1834,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_H@NX_class) DEBUG - value: NXlens_em @@ -1625,8 +1870,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_H/voltage@units) DEBUG - value: V @@ -1646,14 +1893,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). 
The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_I@NX_class) DEBUG - value: NXlens_em @@ -1682,8 +1929,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_I/voltage@units) DEBUG - value: V @@ -1703,14 +1952,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_UCA@NX_class) DEBUG - value: NXlens_em @@ -1739,8 +1988,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_UCA/voltage@units) DEBUG - value: V @@ -1760,14 +2011,14 @@ DEBUG - DEBUG - documentation (NXlens_em.nxdl.xml:): DEBUG - - Description of an electro-magnetic lens or a compound lens. + Base class for an electro-magnetic lens or a compound lens. - For NXtransformations the origin of the coordinate system is placed - in the center of the lens - (its polepiece, pinhole, or another point of reference). - The origin should be specified in the NXtransformations. + For :ref:`NXtransformations` the origin of the coordinate system is placed + in the center of the lens (its polepiece, pinhole, or another + point of reference). The origin should be specified in the :ref:`NXtransformations`. - For details of electro-magnetic lenses in the literature see e.g. `L. Reimer `_ + For details of electro-magnetic lenses in the literature + see e.g. `L. Reimer `_ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_UFA@NX_class) DEBUG - value: NXlens_em @@ -1796,8 +2047,10 @@ NXlens_em.nxdl.xml:/voltage DEBUG - <> DEBUG - documentation (NXlens_em.nxdl.xml:/voltage): DEBUG - - Excitation voltage of the lens. For dipoles it is a single number. For higher - orders, it is an array. + Excitation voltage of the lens. + + For dipoles it is a single number. + For higher order multipoles, it is an array. 
DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/collectioncolumn/lens_UFA/voltage@units) DEBUG - value: V @@ -1912,8 +2165,8 @@ DEBUG - DEBUG - documentation (NXdetector.nxdl.xml:): DEBUG - - A detector, detector bank, or multidetector. - + A detector, detector bank, or multidetector. + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/detector@NX_class) DEBUG - value: NXdetector DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] @@ -1925,19 +2178,26 @@ DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/amplifier_bias): DEBUG - value: 30.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/amplifier_bias +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/amplifier_bias): DEBUG - + The low voltage of the amplifier migh not be the ground. + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/detector/amplifier_bias@units) DEBUG - value: V -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/amplifier_bias +DEBUG - NXdetector.nxdl.xml:/amplifier_bias@units [NX_VOLTAGE] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/amplifier_type): DEBUG - value: b'MCP' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_CHAR'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECTOR/amplifier_type +NXdetector.nxdl.xml:/amplifier_type DEBUG - <> DEBUG - enumeration (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECTOR/amplifier_type): DEBUG - -> MCP @@ -1946,21 +2206,32 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECT DEBUG - Type of electron amplifier in the first amplification step. +DEBUG - documentation (NXdetector.nxdl.xml:/amplifier_type): +DEBUG - + Type of electron amplifier, MCP, channeltron, etc. + DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/amplifier_voltage): DEBUG - value: 2340.0 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/amplifier_voltage +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/amplifier_voltage): DEBUG - + Voltage applied to the amplifier. 
+ DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/detector/amplifier_voltage@units) DEBUG - value: V -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/amplifier_voltage +DEBUG - NXdetector.nxdl.xml:/amplifier_voltage@units [NX_VOLTAGE] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/detector_type): DEBUG - value: b'DLD' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_CHAR'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECTOR/detector_type +NXdetector.nxdl.xml:/detector_type DEBUG - <> DEBUG - enumeration (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECTOR/detector_type): DEBUG - -> DLD @@ -1973,21 +2244,36 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/ELECTRONANALYSER/DETECT DEBUG - Description of the detector type. +DEBUG - documentation (NXdetector.nxdl.xml:/detector_type): +DEBUG - + Description of the detector type, DLD, Phosphor+CCD, CMOS. + DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/detector_voltage): DEBUG - value: 399.99712810186986 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/detector_voltage +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/detector_voltage): +DEBUG - + Voltage applied to detector. + DEBUG - ===== ATTRS (//entry/instrument/electronanalyser/detector/detector_voltage@units) DEBUG - value: V -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_FLOAT'] +DEBUG - classes: +NXdetector.nxdl.xml:/detector_voltage +DEBUG - NXdetector.nxdl.xml:/detector_voltage@units [NX_VOLTAGE] DEBUG - ===== FIELD (//entry/instrument/electronanalyser/detector/sensor_pixels): DEBUG - value: [1800 1800] -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NXdetector', 'NX_INT'] +DEBUG - classes: +NXdetector.nxdl.xml:/sensor_pixels +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/sensor_pixels): DEBUG - + Number of raw active elements in each dimension. Important for swept scans. 
+ DEBUG - ===== FIELD (//entry/instrument/electronanalyser/energy_resolution): DEBUG - value: 110.0 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXelectronanalyser', 'NX_FLOAT'] @@ -2441,15 +2727,22 @@ DEBUG - value: 140.0 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/energy_resolution +NXinstrument.nxdl.xml:/energy_resolution DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/energy_resolution): DEBUG - +DEBUG - documentation (NXinstrument.nxdl.xml:/energy_resolution): +DEBUG - + Energy resolution of the experiment (FWHM or gaussian broadening) + DEBUG - ===== ATTRS (//entry/instrument/energy_resolution@units) DEBUG - value: meV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/energy_resolution +NXinstrument.nxdl.xml:/energy_resolution DEBUG - NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/energy_resolution@units [NX_ENERGY] +DEBUG - NXinstrument.nxdl.xml:/energy_resolution@units [NX_ENERGY] DEBUG - ===== GROUP (//entry/instrument/manipulator [NXmpes::/NXentry/NXinstrument/NXmanipulator]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXmanipulator'] DEBUG - classes: @@ -2854,14 +3147,20 @@ DEBUG - DEBUG - ===== FIELD (//entry/instrument/momentum_resolution): DEBUG - value: 0.08 -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/momentum_resolution +DEBUG - <> +DEBUG - documentation (NXinstrument.nxdl.xml:/momentum_resolution): DEBUG - + Momentum resolution of the experiment (FWHM) + DEBUG - ===== ATTRS (//entry/instrument/momentum_resolution@units) DEBUG - value: 1/angstrom -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/momentum_resolution +DEBUG - NXinstrument.nxdl.xml:/momentum_resolution@units [NX_WAVENUMBER] DEBUG - ===== FIELD (//entry/instrument/name): DEBUG - value: b'Time-of-flight momentum microscope equipped delay line detector, at the endstation of the high rep-rate HHG source at FHI' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_CHAR'] @@ -2869,7 +3168,9 @@ DEBUG - classes: NXinstrument.nxdl.xml:/name DEBUG - <> DEBUG - documentation (NXinstrument.nxdl.xml:/name): -DEBUG - Name of instrument +DEBUG - + Name of instrument + DEBUG - ===== ATTRS (//entry/instrument/name@short_name) DEBUG - value: TR-ARPES @ FHI DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_CHAR'] @@ -2878,7 +3179,9 @@ NXinstrument.nxdl.xml:/name DEBUG - NXinstrument.nxdl.xml:/name@short_name - [NX_CHAR] DEBUG - <> DEBUG - documentation (NXinstrument.nxdl.xml:/name/short_name): -DEBUG - short name for instrument, perhaps the acronym +DEBUG - + short name for instrument, perhaps the acronym + DEBUG - ===== GROUP (//entry/instrument/source [NXmpes::/NXentry/NXinstrument/NXsource]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] DEBUG - classes: @@ -2896,7 +3199,9 @@ DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:/SOURCE): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:): -DEBUG - The neutron or x-ray storage ring/facility. +DEBUG - + The neutron or x-ray storage ring/facility. 
+ DEBUG - ===== ATTRS (//entry/instrument/source@NX_class) DEBUG - value: NXsource DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -2913,7 +3218,9 @@ DEBUG - classes: NXsource.nxdl.xml:/frequency DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/frequency): -DEBUG - Frequency of pulsed source +DEBUG - + Frequency of pulsed source + DEBUG - ===== ATTRS (//entry/instrument/source/frequency@units) DEBUG - value: kHz DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -2930,7 +3237,9 @@ DEBUG - enumeration (NXsource.nxdl.xml:/mode): DEBUG - -> Single Bunch DEBUG - -> Multi Bunch DEBUG - documentation (NXsource.nxdl.xml:/mode): -DEBUG - source operating mode +DEBUG - + source operating mode + DEBUG - ===== FIELD (//entry/instrument/source/name): DEBUG - value: b'HHG @ TR-ARPES @ FHI' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -2941,17 +3250,26 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/name): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/name): -DEBUG - Name of source +DEBUG - + Name of source + DEBUG - ===== FIELD (//entry/instrument/source/photon_energy): DEBUG - value: 21.7 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] +DEBUG - classes: +NXsource.nxdl.xml:/photon_energy +DEBUG - <> +DEBUG - documentation (NXsource.nxdl.xml:/photon_energy): DEBUG - + The center photon energy of the source, before it is + monochromatized or converted + DEBUG - ===== ATTRS (//entry/instrument/source/photon_energy@units) DEBUG - value: eV -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] +DEBUG - classes: +NXsource.nxdl.xml:/photon_energy +DEBUG - NXsource.nxdl.xml:/photon_energy@units [NX_ENERGY] DEBUG - ===== FIELD (//entry/instrument/source/probe): DEBUG - value: b'ultraviolet' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -2978,7 +3296,9 @@ DEBUG - restricted. DEBUG - documentation (NXsource.nxdl.xml:/probe): -DEBUG - type of radiation probe (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation probe (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/source/type): DEBUG - value: b'HHG laser' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -3013,7 +3333,9 @@ DEBUG - -> Metal Jet X-ray DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/type): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/type): -DEBUG - type of radiation source (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation source (pick one from the enumerated list and spell exactly) + DEBUG - ===== GROUP (//entry/instrument/source_pump [NXmpes::/NXentry/NXinstrument/NXsource]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] DEBUG - classes: @@ -3031,7 +3353,9 @@ DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:/SOURCE): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:): -DEBUG - The neutron or x-ray storage ring/facility. +DEBUG - + The neutron or x-ray storage ring/facility. 
+ DEBUG - ===== ATTRS (//entry/instrument/source_pump@NX_class) DEBUG - value: NXsource DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -3048,7 +3372,9 @@ DEBUG - classes: NXsource.nxdl.xml:/frequency DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/frequency): -DEBUG - Frequency of pulsed source +DEBUG - + Frequency of pulsed source + DEBUG - ===== ATTRS (//entry/instrument/source_pump/frequency@units) DEBUG - value: kHz DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -3065,7 +3391,9 @@ DEBUG - enumeration (NXsource.nxdl.xml:/mode): DEBUG - -> Single Bunch DEBUG - -> Multi Bunch DEBUG - documentation (NXsource.nxdl.xml:/mode): -DEBUG - source operating mode +DEBUG - + source operating mode + DEBUG - ===== FIELD (//entry/instrument/source_pump/name): DEBUG - value: b'OPCPA @ TR-ARPES @ FHI' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -3076,17 +3404,26 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/name): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/name): -DEBUG - Name of source +DEBUG - + Name of source + DEBUG - ===== FIELD (//entry/instrument/source_pump/photon_energy): DEBUG - value: 1.2 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] +DEBUG - classes: +NXsource.nxdl.xml:/photon_energy +DEBUG - <> +DEBUG - documentation (NXsource.nxdl.xml:/photon_energy): DEBUG - + The center photon energy of the source, before it is + monochromatized or converted + DEBUG - ===== ATTRS (//entry/instrument/source_pump/photon_energy@units) DEBUG - value: eV -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] +DEBUG - classes: +NXsource.nxdl.xml:/photon_energy +DEBUG - NXsource.nxdl.xml:/photon_energy@units [NX_ENERGY] DEBUG - ===== FIELD (//entry/instrument/source_pump/probe): DEBUG - value: b'visible light' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -3113,7 +3450,9 @@ DEBUG - restricted. 
DEBUG - documentation (NXsource.nxdl.xml:/probe): -DEBUG - type of radiation probe (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation probe (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/source_pump/type): DEBUG - value: b'Optical Laser' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -3148,17 +3487,25 @@ DEBUG - -> Metal Jet X-ray DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/type): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/type): -DEBUG - type of radiation source (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation source (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/temporal_resolution): DEBUG - value: 35.0 -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/temporal_resolution +DEBUG - <> +DEBUG - documentation (NXinstrument.nxdl.xml:/temporal_resolution): DEBUG - + Temporal resolution of the experiment (FWHM) + DEBUG - ===== ATTRS (//entry/instrument/temporal_resolution@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/temporal_resolution +DEBUG - NXinstrument.nxdl.xml:/temporal_resolution@units [NX_TIME] DEBUG - ===== GROUP (//entry/process [NXmpes::/NXentry/NXprocess]): DEBUG - classpath: ['NXentry', 'NXprocess'] DEBUG - classes: @@ -3175,7 +3522,9 @@ DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/PROCESS): DEBUG - DEBUG - documentation (NXprocess.nxdl.xml:): -DEBUG - Document an event of data processing, reconstruction, or analysis for this data. +DEBUG - + Document an event of data processing, reconstruction, or analysis for this data. + DEBUG - ===== ATTRS (//entry/process@NX_class) DEBUG - value: NXprocess DEBUG - classpath: ['NXentry', 'NXprocess'] @@ -3185,48 +3534,109 @@ NXentry.nxdl.xml:/PROCESS NXprocess.nxdl.xml: DEBUG - @NX_class [NX_CHAR] DEBUG - -DEBUG - ===== GROUP (//entry/process/distortion [NXmpes::/NXentry/NXprocess/distortion]): -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - ===== GROUP (//entry/process/distortion [NXmpes::/NXentry/NXprocess/NXdistortion]): +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion'] +DEBUG - classes: +NXprocess.nxdl.xml:/DISTORTION +NXdistortion.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXprocess.nxdl.xml:/DISTORTION): +DEBUG - + Describes the operations of image distortion correction + +DEBUG - documentation (NXdistortion.nxdl.xml:): +DEBUG - + Subclass of NXprocess to describe post-processing distortion correction. + +DEBUG - ===== ATTRS (//entry/process/distortion@NX_class) +DEBUG - value: NXdistortion +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion'] +DEBUG - classes: +NXprocess.nxdl.xml:/DISTORTION +NXdistortion.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/process/distortion/applied): DEBUG - value: True -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_BOOLEAN'] +DEBUG - classes: +NXdistortion.nxdl.xml:/applied +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/applied): DEBUG - + Has the distortion correction been applied? 
+ DEBUG - ===== FIELD (//entry/process/distortion/cdeform_field): DEBUG - value: [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ... -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_FLOAT'] +DEBUG - classes: +NXdistortion.nxdl.xml:/cdeform_field +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/cdeform_field): DEBUG - + Column deformation field for general non-rigid distortion corrections. 2D matrix + holding the column information of the mapping of each original coordinate. + DEBUG - ===== FIELD (//entry/process/distortion/original_centre): DEBUG - value: [203. 215.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_FLOAT'] +DEBUG - classes: +NXdistortion.nxdl.xml:/original_centre +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/original_centre): DEBUG - + For symmetry-guided distortion correction. Here we record the coordinates of the + symmetry centre point. + DEBUG - ===== FIELD (//entry/process/distortion/original_points): DEBUG - value: [166. 283.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_FLOAT'] +DEBUG - classes: +NXdistortion.nxdl.xml:/original_points +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/original_points): DEBUG - + For symmetry-guided distortion correction. Here we record the coordinates of the + relevant symmetry points. + DEBUG - ===== FIELD (//entry/process/distortion/rdeform_field): DEBUG - value: [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. ... -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_FLOAT'] +DEBUG - classes: +NXdistortion.nxdl.xml:/rdeform_field +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/rdeform_field): DEBUG - + Row deformation field for general non-rigid distortion corrections. 2D matrix + holding the row information of the mapping of each original coordinate. + DEBUG - ===== FIELD (//entry/process/distortion/symmetry): DEBUG - value: 6 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXdistortion', 'NX_INT'] +DEBUG - classes: +NXdistortion.nxdl.xml:/symmetry +DEBUG - <> +DEBUG - documentation (NXdistortion.nxdl.xml:/symmetry): DEBUG - + For `symmetry-guided distortion correction`_, + where a pattern of features is mapped to the regular geometric structure expected + from the symmetry. Here we record the number of elementary symmetry operations. + + .. _symmetry-guided distortion correction: https://www.sciencedirect.com/science/article/abs/pii/S0304399118303474?via%3Dihub + DEBUG - ===== GROUP (//entry/process/energy_calibration [NXmpes::/NXentry/NXprocess/NXcalibration]): DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/PROCESS/energy_calibration +NXprocess.nxdl.xml:/CALIBRATION NXcalibration.nxdl.xml: DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/PROCESS/energy_calibration): DEBUG - +DEBUG - documentation (NXprocess.nxdl.xml:/CALIBRATION): +DEBUG - + Describes the operations of calibration procedures, e.g. axis calibrations. + DEBUG - documentation (NXcalibration.nxdl.xml:): DEBUG - Subclass of NXprocess to describe post-processing calibrations. 
@@ -3236,6 +3646,7 @@ DEBUG - value: NXcalibration DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] DEBUG - classes: NXmpes.nxdl.xml:/ENTRY/PROCESS/energy_calibration +NXprocess.nxdl.xml:/CALIBRATION NXcalibration.nxdl.xml: DEBUG - @NX_class [NX_CHAR] DEBUG - @@ -3282,7 +3693,13 @@ DEBUG - Use a0, a1, ..., an for the coefficients, corresponding to the values in the coefficients field. - Use x0, x1, ..., xn for the variables. + Use x0, x1, ..., xn for the nth position in the `original_axis` field. + If there is the symbol attribute specified for the `original_axis` this may be used instead of x. + If you want to use the whole axis use `x`. + Alternate axis can also be available as specified by the `input_SYMBOL` field. + The data should then be referred here by the `SYMBOL` name, e.g., for a field + name `input_my_field` it should be referred here by `my_field` or `my_field0` if + you want to read the zeroth element of the array. The formula should be numpy compliant. @@ -3296,152 +3713,508 @@ DEBUG - documentation (NXcalibration.nxdl.xml:/original_axis): DEBUG - Vector containing the data coordinates in the original uncalibrated axis -DEBUG - ===== GROUP (//entry/process/kx_calibration [NXmpes::/NXentry/NXprocess/kx_calibration]): -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - ===== GROUP (//entry/process/kx_calibration [NXmpes::/NXentry/NXprocess/NXcalibration]): +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] +DEBUG - classes: +NXprocess.nxdl.xml:/CALIBRATION +NXcalibration.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXprocess.nxdl.xml:/CALIBRATION): +DEBUG - + Describes the operations of calibration procedures, e.g. axis calibrations. + +DEBUG - documentation (NXcalibration.nxdl.xml:): +DEBUG - + Subclass of NXprocess to describe post-processing calibrations. + +DEBUG - ===== ATTRS (//entry/process/kx_calibration@NX_class) +DEBUG - value: NXcalibration +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] +DEBUG - classes: +NXprocess.nxdl.xml:/CALIBRATION +NXcalibration.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/process/kx_calibration/applied): DEBUG - value: True -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_BOOLEAN'] +DEBUG - classes: +NXcalibration.nxdl.xml:/applied +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/applied): DEBUG - + Has the calibration been applied? + DEBUG - ===== FIELD (//entry/process/kx_calibration/calibrated_axis): DEBUG - value: [-2.68021375 -2.66974416 -2.65927458 -2.64880499 -2.63833541 -2.62786582 ... -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/calibrated_axis +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/calibrated_axis): DEBUG - + A vector representing the axis after calibration, matching the data length + DEBUG - ===== FIELD (//entry/process/kx_calibration/offset): DEBUG - value: 256.0 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/offset +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/offset): DEBUG - + For linear calibration. Offset parameter. + This is should yield the relation `calibrated_axis` = `scaling` * `original_axis` + `offset`. 
+ DEBUG - ===== FIELD (//entry/process/kx_calibration/scaling): DEBUG - value: 0.01046958495673419 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/scaling +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/scaling): DEBUG - -DEBUG - ===== GROUP (//entry/process/ky_calibration [NXmpes::/NXentry/NXprocess/ky_calibration]): -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA + For linear calibration. Scaling parameter. + This is should yield the relation `calibrated_axis` = `scaling` * `original_axis` + `offset`. + +DEBUG - ===== GROUP (//entry/process/ky_calibration [NXmpes::/NXentry/NXprocess/NXcalibration]): +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] +DEBUG - classes: +NXprocess.nxdl.xml:/CALIBRATION +NXcalibration.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXprocess.nxdl.xml:/CALIBRATION): +DEBUG - + Describes the operations of calibration procedures, e.g. axis calibrations. + +DEBUG - documentation (NXcalibration.nxdl.xml:): +DEBUG - + Subclass of NXprocess to describe post-processing calibrations. + +DEBUG - ===== ATTRS (//entry/process/ky_calibration@NX_class) +DEBUG - value: NXcalibration +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration'] +DEBUG - classes: +NXprocess.nxdl.xml:/CALIBRATION +NXcalibration.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/process/ky_calibration/applied): DEBUG - value: True -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_BOOLEAN'] +DEBUG - classes: +NXcalibration.nxdl.xml:/applied +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/applied): DEBUG - + Has the calibration been applied? + DEBUG - ===== FIELD (//entry/process/ky_calibration/calibrated_axis): DEBUG - value: [-2.68021375 -2.66974416 -2.65927458 -2.64880499 -2.63833541 -2.62786582 ... -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/calibrated_axis +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/calibrated_axis): DEBUG - + A vector representing the axis after calibration, matching the data length + DEBUG - ===== FIELD (//entry/process/ky_calibration/offset): DEBUG - value: 256.0 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/offset +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/offset): DEBUG - + For linear calibration. Offset parameter. + This is should yield the relation `calibrated_axis` = `scaling` * `original_axis` + `offset`. + DEBUG - ===== FIELD (//entry/process/ky_calibration/scaling): DEBUG - value: 0.01046958495673419 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXcalibration', 'NX_FLOAT'] +DEBUG - classes: +NXcalibration.nxdl.xml:/scaling +DEBUG - <> +DEBUG - documentation (NXcalibration.nxdl.xml:/scaling): DEBUG - -DEBUG - ===== GROUP (//entry/process/registration [NXmpes::/NXentry/NXprocess/registration]): -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA + For linear calibration. Scaling parameter. 
+ This is should yield the relation `calibrated_axis` = `scaling` * `original_axis` + `offset`. + +DEBUG - ===== GROUP (//entry/process/registration [NXmpes::/NXentry/NXprocess/NXregistration]): +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration'] +DEBUG - classes: +NXprocess.nxdl.xml:/REGISTRATION +NXregistration.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXprocess.nxdl.xml:/REGISTRATION): +DEBUG - + Describes the operations of image registration + +DEBUG - documentation (NXregistration.nxdl.xml:): +DEBUG - + Describes image registration procedures. + +DEBUG - ===== ATTRS (//entry/process/registration@NX_class) +DEBUG - value: NXregistration +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration'] +DEBUG - classes: +NXprocess.nxdl.xml:/REGISTRATION +NXregistration.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/process/registration/applied): DEBUG - value: True -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NX_BOOLEAN'] +DEBUG - classes: +NXregistration.nxdl.xml:/applied +DEBUG - <> +DEBUG - documentation (NXregistration.nxdl.xml:/applied): DEBUG - + Has the registration been applied? + DEBUG - ===== FIELD (//entry/process/registration/depends_on): DEBUG - value: b'/entry/process/registration/tranformations/rot_z' -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== GROUP (//entry/process/registration/tranformations [NXmpes::/NXentry/NXprocess/registration/tranformations]): -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== FIELD (//entry/process/registration/tranformations/rot_z): -DEBUG - value: -1.0 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@depends_on) -DEBUG - value: trans_y -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@offset) -DEBUG - value: [256. 256. 0.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@transformation_type) -DEBUG - value: rotation -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@units) -DEBUG - value: degrees -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@vector) -DEBUG - value: [0. 0. 1.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NX_CHAR'] +DEBUG - classes: +NXregistration.nxdl.xml:/depends_on +DEBUG - <> +DEBUG - documentation (NXregistration.nxdl.xml:/depends_on): DEBUG - -DEBUG - ===== FIELD (//entry/process/registration/tranformations/trans_x): -DEBUG - value: 43.0 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA + Specifies the position by pointing to the last transformation in the + transformation chain in the NXtransformations group. 
+ +DEBUG - ===== GROUP (//entry/process/registration/tranformations [NXmpes::/NXentry/NXprocess/NXregistration/NXtransformations]): +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations'] +DEBUG - classes: +NXregistration.nxdl.xml:/TRANSFORMATIONS +NXtransformations.nxdl.xml: +DEBUG - <> +DEBUG - documentation (NXregistration.nxdl.xml:/TRANSFORMATIONS): DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@depends_on) -DEBUG - value: . -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA + To describe the operations of image registration (combinations of rigid + translations and rotations) + +DEBUG - documentation (NXtransformations.nxdl.xml:): DEBUG - -DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@transformation_type) -DEBUG - value: translation -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA + Collection of axis-based translations and rotations to describe a geometry. + May also contain axes that do not move and therefore do not have a transformation + type specified, but are useful in understanding coordinate frames within which + transformations are done, or in documenting important directions, such as the + direction of gravity. + + A nested sequence of transformations lists the translation and rotation steps + needed to describe the position and orientation of any movable or fixed device. + + There will be one or more transformations (axes) defined by one or more fields + for each transformation. Transformations can also be described by NXlog groups when + the values change with time. The all-caps name ``AXISNAME`` designates the + particular axis generating a transformation (e.g. a rotation axis or a translation + axis or a general axis). The attribute ``units="NX_TRANSFORMATION"`` designates the + units will be appropriate to the ``transformation_type`` attribute: + + * ``NX_LENGTH`` for ``translation`` + * ``NX_ANGLE`` for ``rotation`` + * ``NX_UNITLESS`` for axes for which no transformation type is specified + + This class will usually contain all axes of a sample stage or goniometer or + a detector. The NeXus default McSTAS coordinate frame is assumed, but additional + useful coordinate axes may be defined by using axes for which no transformation + type has been specified. + + The entry point (``depends_on``) will be outside of this class and point to a + field in here. Following the chain may also require following ``depends_on`` + links to transformations outside, for example to a common base table. If + a relative path is given, it is relative to the group enclosing the ``depends_on`` + specification. + + For a chain of three transformations, where :math:`T_1` depends on :math:`T_2` + and that in turn depends on :math:`T_3`, the final transformation :math:`T_f` is + + .. math:: T_f = T_3 T_2 T_1 + + In explicit terms, the transformations are a subset of affine transformations + expressed as 4x4 matrices that act on homogeneous coordinates, :math:`w=(x,y,z,1)^T`. + + For rotation and translation, + + .. math:: T_r &= \begin{pmatrix} R & o \\ 0_3 & 1 \end{pmatrix} \\ T_t &= \begin{pmatrix} I_3 & t + o \\ 0_3 & 1 \end{pmatrix} + + where :math:`R` is the usual 3x3 rotation matrix, :math:`o` is an offset vector, + :math:`0_3` is a row of 3 zeros, :math:`I_3` is the 3x3 identity matrix and + :math:`t` is the translation vector. 
+ + :math:`o` is given by the ``offset`` attribute, :math:`t` is given by the ``vector`` + attribute multiplied by the field value, and :math:`R` is defined as a rotation + about an axis in the direction of ``vector``, of angle of the field value. + + NOTE + + One possible use of ``NXtransformations`` is to define the motors and + transformations for a diffractometer (goniometer). Such use is mentioned + in the ``NXinstrument`` base class. Use one ``NXtransformations`` group + for each diffractometer and name the group appropriate to the device. + Collecting the motors of a sample table or xyz-stage in an NXtransformations + group is equally possible. + + + Following the section on the general dscription of axis in NXtransformations is a section which + documents the fields commonly used within NeXus for positioning purposes and their meaning. Whenever + there is a need for positioning a beam line component please use the existing names. Use as many fields + as needed in order to position the component. Feel free to add more axis if required. In the description + given below, only those atttributes which are defined through the name are spcified. Add the other attributes + of the full set: + + * vector + * offset + * transformation_type + * depends_on + + as needed. + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations@NX_class) +DEBUG - value: NXtransformations +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations'] +DEBUG - classes: +NXregistration.nxdl.xml:/TRANSFORMATIONS +NXtransformations.nxdl.xml: +DEBUG - @NX_class [NX_CHAR] +DEBUG - +DEBUG - ===== FIELD (//entry/process/registration/tranformations/rot_z): +DEBUG - value: -1.0 +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME): +DEBUG - + Units need to be appropriate for translation or rotation + + The name of this field is not forced. The user is free to use any name + that does not cause confusion. When using more than one ``AXISNAME`` field, + make sure that each field name is unique in the same group, as required + by HDF5. + + The values given should be the start points of exposures for the corresponding + frames. The end points should be given in ``AXISNAME_end``. + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@depends_on) +DEBUG - value: trans_y +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@depends_on - [NX_CHAR] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/depends_on): +DEBUG - + Points to the path to a field defining the axis on which this + depends or the string ".". + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@offset) +DEBUG - value: [256. 256. 0.] +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@offset - [NX_NUMBER] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/offset): +DEBUG - + A fixed offset applied before the transformation (three vector components). 
+ This is not intended to be a substitute for a fixed ``translation`` axis but, for example, + as the mechanical offset from mounting the axis to its dependency. + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@transformation_type) +DEBUG - value: rotation +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@transformation_type - [NX_CHAR] +DEBUG - <> +DEBUG - enumeration (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): +DEBUG - -> translation +DEBUG - -> rotation +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): +DEBUG - + The transformation_type may be ``translation``, in which case the + values are linear displacements along the axis, ``rotation``, + in which case the values are angular rotations around the axis. + + If this attribute is omitted, this is an axis for which there + is no motion to be specifies, such as the direction of gravity, + or the direction to the source, or a basis vector of a + coordinate frame. + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@units) +DEBUG - value: degrees +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@units [NX_TRANSFORMATION] +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/rot_z@vector) +DEBUG - value: [0. 0. 1.] +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@vector - [NX_NUMBER] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/vector): +DEBUG - + Three values that define the axis for this transformation. + The axis should be normalized to unit length, making it + dimensionless. For ``rotation`` axes, the direction should be + chosen for a right-handed rotation with increasing angle. + For ``translation`` axes the direction should be chosen for + increasing displacement. For general axes, an appropriate direction + should be chosen. + +DEBUG - ===== FIELD (//entry/process/registration/tranformations/trans_x): +DEBUG - value: 43.0 +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME): +DEBUG - + Units need to be appropriate for translation or rotation + + The name of this field is not forced. The user is free to use any name + that does not cause confusion. When using more than one ``AXISNAME`` field, + make sure that each field name is unique in the same group, as required + by HDF5. + + The values given should be the start points of exposures for the corresponding + frames. The end points should be given in ``AXISNAME_end``. + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@depends_on) +DEBUG - value: . 
+DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@depends_on - [NX_CHAR] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/depends_on): +DEBUG - + Points to the path to a field defining the axis on which this + depends or the string ".". + +DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@transformation_type) +DEBUG - value: translation +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@transformation_type - [NX_CHAR] +DEBUG - <> +DEBUG - enumeration (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): +DEBUG - -> translation +DEBUG - -> rotation +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): DEBUG - + The transformation_type may be ``translation``, in which case the + values are linear displacements along the axis, ``rotation``, + in which case the values are angular rotations around the axis. + + If this attribute is omitted, this is an axis for which there + is no motion to be specifies, such as the direction of gravity, + or the direction to the source, or a basis vector of a + coordinate frame. + DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@units) DEBUG - value: pixels -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@units [NX_TRANSFORMATION] DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_x@vector) DEBUG - value: [1. 0. 0.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@vector - [NX_NUMBER] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/vector): DEBUG - + Three values that define the axis for this transformation. + The axis should be normalized to unit length, making it + dimensionless. For ``rotation`` axes, the direction should be + chosen for a right-handed rotation with increasing angle. + For ``translation`` axes the direction should be chosen for + increasing displacement. For general axes, an appropriate direction + should be chosen. + DEBUG - ===== FIELD (//entry/process/registration/tranformations/trans_y): DEBUG - value: 55.0 -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME): DEBUG - + Units need to be appropriate for translation or rotation + + The name of this field is not forced. The user is free to use any name + that does not cause confusion. When using more than one ``AXISNAME`` field, + make sure that each field name is unique in the same group, as required + by HDF5. + + The values given should be the start points of exposures for the corresponding + frames. The end points should be given in ``AXISNAME_end``. 
+ DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_y@depends_on) DEBUG - value: trans_x -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@depends_on - [NX_CHAR] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/depends_on): DEBUG - + Points to the path to a field defining the axis on which this + depends or the string ".". + DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_y@transformation_type) DEBUG - value: translation -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@transformation_type - [NX_CHAR] +DEBUG - <> +DEBUG - enumeration (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): +DEBUG - -> translation +DEBUG - -> rotation +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/transformation_type): DEBUG - + The transformation_type may be ``translation``, in which case the + values are linear displacements along the axis, ``rotation``, + in which case the values are angular rotations around the axis. + + If this attribute is omitted, this is an axis for which there + is no motion to be specifies, such as the direction of gravity, + or the direction to the source, or a basis vector of a + coordinate frame. + DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_y@units) DEBUG - value: pixels -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@units [NX_TRANSFORMATION] DEBUG - ===== ATTRS (//entry/process/registration/tranformations/trans_y@vector) DEBUG - value: [0. 1. 0.] -DEBUG - classpath: ['NXentry', 'NXprocess'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXprocess', 'NXregistration', 'NXtransformations', 'NX_NUMBER'] +DEBUG - classes: +NXtransformations.nxdl.xml:/AXISNAME +DEBUG - NXtransformations.nxdl.xml:/AXISNAME@vector - [NX_NUMBER] +DEBUG - <> +DEBUG - documentation (NXtransformations.nxdl.xml:/AXISNAME/vector): DEBUG - + Three values that define the axis for this transformation. + The axis should be normalized to unit length, making it + dimensionless. For ``rotation`` axes, the direction should be + chosen for a right-handed rotation with increasing angle. + For ``translation`` axes the direction should be chosen for + increasing displacement. For general axes, an appropriate direction + should be chosen. + DEBUG - ===== GROUP (//entry/sample [NXmpes::/NXentry/NXsample]): DEBUG - classpath: ['NXentry', 'NXsample'] DEBUG - classes: @@ -3455,12 +4228,12 @@ DEBUG - documentation (NXentry.nxdl.xml:/SAMPLE): DEBUG - DEBUG - documentation (NXsample.nxdl.xml:): DEBUG - - Any information on the sample. - - This could include scanned variables that - are associated with one of the data dimensions, e.g. the magnetic field, or - logged data, e.g. monitored temperature vs elapsed time. - + Any information on the sample. + + This could include scanned variables that + are associated with one of the data dimensions, e.g. 
the magnetic field, or + logged data, e.g. monitored temperature vs elapsed time. + DEBUG - ===== ATTRS (//entry/sample@NX_class) DEBUG - value: NXsample DEBUG - classpath: ['NXentry', 'NXsample'] @@ -3472,14 +4245,20 @@ DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/sample/bias): DEBUG - value: 17.799719004221362 -DEBUG - classpath: ['NXentry', 'NXsample'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] +DEBUG - classes: +NXmpes.nxdl.xml:/ENTRY/SAMPLE/bias +DEBUG - <> +DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/SAMPLE/bias): DEBUG - + Voltage applied to sample and sample holder. + DEBUG - ===== ATTRS (//entry/sample/bias@units) DEBUG - value: V -DEBUG - classpath: ['NXentry', 'NXsample'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] +DEBUG - classes: +NXmpes.nxdl.xml:/ENTRY/SAMPLE/bias +DEBUG - NXmpes.nxdl.xml:/ENTRY/SAMPLE/bias@units [NX_VOLTAGE] DEBUG - ===== FIELD (//entry/sample/chemical_formula): DEBUG - value: b'MoTe2' DEBUG - classpath: ['NXentry', 'NXsample', 'NX_CHAR'] @@ -3494,25 +4273,25 @@ DEBUG - DEBUG - documentation (NXsample.nxdl.xml:/chemical_formula): DEBUG - - The chemical formula specified using CIF conventions. - Abbreviated version of CIF standard: - - * Only recognized element symbols may be used. - * Each element symbol is followed by a 'count' number. A count of '1' may be omitted. - * A space or parenthesis must separate each cluster of (element symbol + count). - * Where a group of elements is enclosed in parentheses, the multiplier for the - group must follow the closing parentheses. That is, all element and group - multipliers are assumed to be printed as subscripted numbers. - * Unless the elements are ordered in a manner that corresponds to their chemical - structure, the order of the elements within any group or moiety depends on - whether or not carbon is present. - * If carbon is present, the order should be: - - - C, then H, then the other elements in alphabetical order of their symbol. - - If carbon is not present, the elements are listed purely in alphabetic order of their symbol. - - * This is the *Hill* system used by Chemical Abstracts. - + The chemical formula specified using CIF conventions. + Abbreviated version of CIF standard: + + * Only recognized element symbols may be used. + * Each element symbol is followed by a 'count' number. A count of '1' may be omitted. + * A space or parenthesis must separate each cluster of (element symbol + count). + * Where a group of elements is enclosed in parentheses, the multiplier for the + group must follow the closing parentheses. That is, all element and group + multipliers are assumed to be printed as subscripted numbers. + * Unless the elements are ordered in a manner that corresponds to their chemical + structure, the order of the elements within any group or moiety depends on + whether or not carbon is present. + * If carbon is present, the order should be: + + - C, then H, then the other elements in alphabetical order of their symbol. + - If carbon is not present, the elements are listed purely in alphabetic order of their symbol. + + * This is the *Hill* system used by Chemical Abstracts. 
+ DEBUG - ===== FIELD (//entry/sample/depends_on): DEBUG - value: b'/entry/sample/transformations/corrected_phi' DEBUG - classpath: ['NXentry', 'NXsample', 'NX_CHAR'] @@ -3521,12 +4300,12 @@ NXsample.nxdl.xml:/depends_on DEBUG - <> DEBUG - documentation (NXsample.nxdl.xml:/depends_on): DEBUG - - NeXus positions components by applying a set of translations and rotations - to apply to the component starting from 0, 0, 0. The order of these operations - is critical and forms what NeXus calls a dependency chain. The depends_on - field defines the path to the top most operation of the dependency chain or the - string "." if located in the origin. Usually these operations are stored in a - NXtransformations group. But NeXus allows them to be stored anywhere. + NeXus positions components by applying a set of translations and rotations + to apply to the component starting from 0, 0, 0. The order of these operations + is critical and forms what NeXus calls a dependency chain. The depends_on + field defines the path to the top most operation of the dependency chain or the + string "." if located in the origin. Usually these operations are stored in a + NXtransformations group. But NeXus allows them to be stored anywhere. DEBUG - ===== FIELD (//entry/sample/description): DEBUG - value: b'MoTe2' @@ -3536,8 +4315,8 @@ NXsample.nxdl.xml:/description DEBUG - <> DEBUG - documentation (NXsample.nxdl.xml:/description): DEBUG - - Description of the sample - + Description of the sample + DEBUG - ===== FIELD (//entry/sample/gas_pressure): DEBUG - value: 4.5599999999999996e-11 DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] @@ -3562,7 +4341,9 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/SAMPLE/name): DEBUG - DEBUG - documentation (NXsample.nxdl.xml:/name): -DEBUG - Descriptive name of sample +DEBUG - + Descriptive name of sample + DEBUG - ===== FIELD (//entry/sample/preparation_date): DEBUG - value: b'2019-05-22T14:00:00+00:00' DEBUG - classpath: ['NXentry', 'NXsample', 'NX_DATE_TIME'] @@ -3576,7 +4357,9 @@ DEBUG - annealing). DEBUG - documentation (NXsample.nxdl.xml:/preparation_date): -DEBUG - Date of preparation of the sample +DEBUG - + Date of preparation of the sample + DEBUG - ===== GROUP (//entry/sample/preparation_description [NXmpes::/NXentry/NXsample/NXnote]): DEBUG - classpath: ['NXentry', 'NXsample', 'NXnote'] DEBUG - classes: @@ -3675,10 +4458,10 @@ DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/SAMPLE/situation): DEBUG - DEBUG - documentation (NXsample.nxdl.xml:/situation): DEBUG - - The atmosphere will be one of the components, which is where - its details will be stored; the relevant components will be - indicated by the entry in the sample_component member. - + The atmosphere will be one of the components, which is where + its details will be stored; the relevant components will be + indicated by the entry in the sample_component member. + DEBUG - ===== FIELD (//entry/sample/temperature): DEBUG - value: 23.050763803680983 DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] @@ -3694,7 +4477,9 @@ DEBUG - /entry/instrument/manipulator/sample_temperature. DEBUG - documentation (NXsample.nxdl.xml:/temperature): -DEBUG - Sample temperature. This could be a scanned variable +DEBUG - + Sample temperature. 
This could be a scanned variable + DEBUG - ===== ATTRS (//entry/sample/temperature@units) DEBUG - value: K DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] @@ -3711,10 +4496,10 @@ NXtransformations.nxdl.xml: DEBUG - <> DEBUG - documentation (NXsample.nxdl.xml:/TRANSFORMATIONS): DEBUG - - This is the group recommended for holding the chain of translation - and rotation operations necessary to position the component within - the instrument. The dependency chain may however traverse similar groups in - other component groups. + This is the group recommended for holding the chain of translation + and rotation operations necessary to position the component within + the instrument. The dependency chain may however traverse similar groups in + other component groups. DEBUG - documentation (NXtransformations.nxdl.xml:): DEBUG - @@ -4331,7 +5116,9 @@ DEBUG - Datetime of the start of the measurement. DEBUG - documentation (NXentry.nxdl.xml:/start_time): -DEBUG - Starting time of measurement +DEBUG - + Starting time of measurement + DEBUG - ===== FIELD (//entry/title): DEBUG - value: b'Valence Band Dynamics - 1030 nm linear p-polarized pump, 0.6 mJ/cm2 absorbed fluence' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -4342,7 +5129,9 @@ DEBUG - <> DEBUG - documentation (NXmpes.nxdl.xml:/ENTRY/title): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/title): -DEBUG - Extended title for entry +DEBUG - + Extended title for entry + DEBUG - ===== GROUP (//entry/user [NXmpes::/NXentry/NXuser]): DEBUG - classpath: ['NXentry', 'NXuser'] DEBUG - classes: diff --git a/tests/data/dataconverter/readers/mpes/config_file.json b/tests/data/dataconverter/readers/mpes/config_file.json index c584a5011..125243397 100644 --- a/tests/data/dataconverter/readers/mpes/config_file.json +++ b/tests/data/dataconverter/readers/mpes/config_file.json @@ -332,19 +332,10 @@ "/ENTRY[entry]/PROCESS[process]/CALIBRATION[ky_calibration]/offset": "@attrs:metadata/momentum_correction/offset_ky", "/ENTRY[entry]/PROCESS[process]/CALIBRATION[ky_calibration]/calibrated_axis": "@attrs:metadata/momentum_correction/calibration/axis_ky", "/ENTRY[entry]/DATA[data]/@axes": "@data:dims", - "/ENTRY[entry]/DATA[data]/AXISNAME_indices[@kx_indices]": "@data:kx.index", - "/ENTRY[entry]/DATA[data]/AXISNAME_indices[@ky_indices]": "@data:ky.index", - "/ENTRY[entry]/DATA[data]/AXISNAME_indices[@energy_indices]": "@data:energy.index", - "/ENTRY[entry]/DATA[data]/AXISNAME_indices[@delay_indices]": "@data:delay.index", + "/ENTRY[entry]/DATA[data]/AXISNAME_indices[@*_indices]": "@data:*.index", "/ENTRY[entry]/DATA[data]/@signal": "data", "/ENTRY[entry]/DATA[data]/data": "@data:data", "/ENTRY[entry]/DATA[data]/data/@units": "counts", - "/ENTRY[entry]/DATA[data]/VARIABLE[kx]": "@data:kx.data", - "/ENTRY[entry]/DATA[data]/VARIABLE[kx]/@units": "@data:kx.unit", - "/ENTRY[entry]/DATA[data]/VARIABLE[ky]": "@data:ky.data", - "/ENTRY[entry]/DATA[data]/VARIABLE[ky]/@units": "@data:ky.unit", - "/ENTRY[entry]/DATA[data]/VARIABLE[energy]": "@data:energy.data", - "/ENTRY[entry]/DATA[data]/VARIABLE[energy]/@units": "@data:energy.unit", - "/ENTRY[entry]/DATA[data]/VARIABLE[delay]": "@data:delay.data", - "/ENTRY[entry]/DATA[data]/VARIABLE[delay]/@units": "@data:delay.unit" + "/ENTRY[entry]/DATA[data]/AXISNAME[*]": "@data:*.data", + "/ENTRY[entry]/DATA[data]/AXISNAME[*]/@units": "@data:*.unit" } \ No newline at end of file diff --git a/tests/data/dataconverter/readers/xrd/ACZCTS_5-60_181.xrdml b/tests/data/dataconverter/readers/xrd/ACZCTS_5-60_181.xrdml new file mode 100644 
index 000000000..5af61b718 --- /dev/null +++ b/tests/data/dataconverter/readers/xrd/ACZCTS_5-60_181.xrdml @@ -0,0 +1,106 @@ + + + + Configuration=XYZ Stage, Owner=User-1, Creation date=02-Nov-17 2:08:05 PM + Goniometer=PW3050/60 (Theta/Theta); Minimum step size 2Theta:0.001; Minimum step size Omega:0.001 + Sample stage=Programmable x,y,z stage; Minimum step size X:0.01; Minimum step size Y:0.01; Minimum step size Z:0.001 + Diffractometer system=XPERT-PRO + Measurement program=D:\user\Pepe\program_files\BB_10-80_18min.xrdmp, Identifier={5202C7B3-EFFD-43D7-83CA-1A77018B086F} + Batch program=D:\user\Pepe\program_files\batch_5samples.xrdmp, Identifier={49B9A751-97B5-49F0-9CAA-E07AE5E71B6A} + + + + + + + + + PHD Lower Level = 4.02 (keV), PHD Upper Level = 11.26 (keV) + + + 1.5405980 + 1.5444260 + 1.3922500 + 0.5000 + + + 240.00 + + 40 + 40 + Cu + + 12.0 + 0.4 + 6.0 + + + + 0.0400 + + + 11.60 + + + 140.00 + 0.38 + + + + 240.00 + + 8.00 + + + 0.0400 + + + Ni + 0.020 + + + + 25.0 + 70.0 + + Scanning + 3.347 + + + +
+ 2018-06-12T18:34:49+02:00 + 2018-06-12T18:53:34+02:00 + + cnu_xlab + + + Data Collector + XPERT-PRO + 0000000011035964 + +
+ + + 10.00656514 + 79.99097181 + + + 5.00328257 + 39.99548591 + + + 0.00 + + + 50.00 + + + 7.868 + + 49.470 + 2086 2053 2118 2024 2127 2128 2115 2093 2063 1985 2038 2118 2107 2136 2080 2150 2036 2039 2073 2025 2069 2028 2070 1975 1978 2068 2116 2075 2000 2082 1944 2037 1994 2061 2035 2132 2046 2133 1981 2066 1986 2008 2028 2052 2065 2136 2104 2096 1935 2029 2036 2023 1933 2023 2040 2080 1964 2003 1960 1940 2079 2102 1959 2058 1970 2002 1974 1955 1998 2025 2060 1908 2014 2037 1925 2028 1944 2052 2012 2002 2018 1977 2063 1963 2073 1963 1964 1904 1971 1979 1975 2012 2008 1988 1946 1990 1894 1990 1897 1979 1934 2009 1986 1978 1915 1915 2010 1953 1891 1978 1983 1997 1984 1959 2032 1990 1949 1959 1925 1978 1963 1879 2055 1883 1974 1949 1929 1987 2000 1893 1926 1961 1978 1920 1965 1962 1919 1930 2007 1970 1960 1904 2005 1946 1893 1950 1908 1906 1983 1885 1928 1882 1835 1916 1951 2040 1986 1905 1970 1889 1916 2011 1973 1898 1954 1935 1839 1977 1876 1834 1850 1844 1928 2021 1913 1858 1860 1800 1907 1844 1850 1856 1924 1896 1902 1910 1904 1881 1916 2005 1825 1881 1902 1986 1936 1860 1928 1935 1858 1918 1873 1854 1954 1807 1871 1778 1847 1898 1836 1861 1855 1907 1804 1907 1861 1833 1904 1835 1899 1862 1826 1881 1905 1876 1969 1962 1869 1888 1808 1770 1888 1865 1794 1852 1834 1851 1840 1846 1872 1850 1829 1818 1817 1869 1760 1888 1895 1866 1896 1807 1867 1834 1743 1835 1866 1817 1816 1821 1781 1828 1790 1836 1841 1774 1831 1825 1804 1795 1776 1888 1805 1771 1837 1857 1777 1758 1801 1864 1796 1808 1801 1798 1786 1758 1815 1865 1809 1767 1790 1785 1719 1869 1762 1819 1739 1855 1746 1796 1804 1748 1745 1787 1835 1826 1753 1823 1867 1833 1777 1761 1778 1806 1826 1833 1740 1735 1792 1788 1738 1704 1711 1767 1841 1749 1791 1850 1794 1735 1761 1863 1800 1673 1684 1738 1651 1779 1686 1700 1767 1737 1722 1805 1812 1746 1720 1647 1744 1746 1724 1739 1734 1697 1780 1684 1672 1699 1667 1685 1777 1647 1695 1761 1750 1700 1724 1719 1678 1726 1727 1653 1766 1753 1753 1729 1816 1708 1720 1661 1686 1812 1733 1823 1696 1716 1663 1727 1665 1731 1756 1698 1692 1726 1676 1751 1761 1628 1774 1719 1565 1690 1709 1667 1660 1663 1682 1759 1685 1673 1653 1724 1648 1745 1698 1615 1708 1691 1619 1694 1603 1620 1647 1663 1673 1646 1616 1627 1618 1615 1624 1630 1625 1629 1705 1657 1661 1633 1613 1611 1673 1643 1653 1576 1696 1649 1582 1601 1565 1696 1637 1592 1639 1579 1665 1561 1640 1716 1640 1632 1674 1712 1723 1647 1688 1666 1728 1706 1780 1731 1737 1801 1785 1910 1889 1967 1925 1833 1872 1855 1773 1799 1784 1684 1751 1765 1688 1633 1661 1699 1747 1668 1631 1607 1620 1690 1590 1594 1633 1665 1666 1698 1652 1642 1628 1662 1647 1621 1646 1674 1669 1696 1640 1717 1634 1611 1637 1670 1653 1583 1606 1620 1690 1528 1627 1596 1689 1625 1576 1612 1543 1716 1575 1690 1571 1586 1556 1637 1611 1608 1586 1670 1579 1646 1570 1695 1602 1518 1491 1679 1755 1685 1729 1572 1564 1592 1655 1661 1609 1621 1652 1676 1612 1631 1617 1571 1584 1690 1624 1660 1667 1635 1606 1670 1555 1604 1709 1646 1652 1613 1663 1588 1648 1625 1548 1654 1639 1542 1495 1639 1628 1602 1615 1609 1541 1721 1580 1640 1567 1604 1685 1610 1619 1677 1753 1667 1723 1697 1782 1777 1682 1737 1746 1654 1730 1687 1592 1552 1590 1576 1606 1608 1541 1589 1489 1596 1494 1598 1591 1610 1553 1527 1602 1626 1664 1549 1542 1580 1587 1544 1562 1563 1533 1482 1508 1602 1555 1557 1585 1507 1469 1582 1552 1575 1575 1528 1515 1518 1539 1560 1615 1491 1523 1537 1474 1537 1571 1521 1523 1560 1522 1488 1466 1627 1528 1637 1553 1580 1619 1505 1472 1552 1443 1558 1501 1542 1589 1574 1532 1566 
1446 1503 1555 1563 1467 1492 1476 1570 1540 1562 1515 1480 1491 1483 1509 1455 1549 1502 1543 1531 1546 1516 1447 1527 1480 1592 1550 1496 1496 1474 1431 1468 1537 1519 1526 1507 1525 1500 1494 1503 1504 1522 1541 1584 1522 1466 1498 1435 1533 1520 1584 1415 1490 1406 1520 1481 1548 1440 1471 1559 1511 1571 1518 1544 1440 1410 1451 1516 1437 1549 1459 1493 1510 1533 1487 1419 1475 1452 1500 1415 1438 1474 1493 1436 1449 1422 1441 1404 1407 1422 1413 1477 1408 1419 1475 1425 1414 1501 1335 1431 1434 1487 1430 1496 1440 1489 1420 1435 1537 1352 1456 1438 1460 1455 1443 1533 1434 1506 1417 1441 1440 1429 1483 1400 1475 1512 1424 1454 1485 1401 1513 1449 1403 1393 1417 1432 1438 1391 1393 1380 1376 1473 1430 1387 1405 1434 1376 1441 1394 1407 1389 1363 1396 1452 1420 1396 1360 1412 1432 1427 1375 1424 1367 1372 1407 1523 1410 1372 1383 1417 1449 1330 1343 1407 1377 1416 1448 1429 1498 1464 1412 1390 1402 1313 1351 1434 1360 1410 1412 1370 1343 1359 1402 1381 1352 1419 1361 1417 1388 1388 1359 1396 1366 1369 1368 1338 1333 1406 1336 1351 1366 1369 1364 1365 1358 1346 1372 1340 1251 1314 1363 1472 1391 1344 1398 1394 1363 1410 1312 1430 1450 1381 1410 1371 1371 1378 1370 1316 1326 1355 1257 1303 1326 1388 1322 1368 1324 1350 1403 1413 1423 1403 1400 1414 1363 1403 1318 1324 1309 1360 1325 1376 1402 1391 1419 1323 1338 1477 1372 1430 1425 1427 1426 1414 1478 1515 1547 1635 1621 1653 1795 1726 1706 1611 1538 1541 1466 1505 1430 1449 1358 1377 1375 1350 1315 1356 1312 1279 1285 1303 1285 1300 1385 1283 1302 1413 1344 1327 1288 1288 1317 1321 1322 1377 1346 1385 1331 1419 1350 1271 1405 1356 1318 1283 1328 1303 1358 1352 1333 1310 1267 1276 1287 1318 1304 1277 1321 1274 1294 1292 1333 1235 1233 1249 1299 1334 1313 1266 1321 1278 1293 1267 1257 1247 1381 1313 1342 1268 1300 1239 1286 1328 1319 1300 1333 1300 1285 1355 1202 1251 1340 1255 1306 1300 1270 1277 1292 1182 1265 1332 1291 1211 1296 1366 1242 1257 1323 1296 1255 1241 1312 1310 1206 1198 1234 1269 1322 1272 1341 1339 1195 1247 1284 1278 1323 1287 1242 1251 1225 1273 1271 1231 1266 1208 1246 1289 1229 1221 1224 1266 1248 1256 1273 1227 1254 1246 1207 1296 1292 1236 1245 1275 1252 1216 1250 1241 1218 1246 1249 1253 1291 1250 1269 1266 1186 1229 1283 1243 1297 1224 1201 1200 1192 1246 1211 1275 1297 1274 1202 1239 1243 1223 1211 1250 1300 1267 1232 1233 1256 1213 1168 1234 1349 1331 1229 1280 1344 1281 1302 1265 1316 1297 1229 1284 1241 1275 1268 1292 1315 1323 1357 1363 1362 1387 1428 1480 1582 1459 1478 1425 1303 1284 1233 1312 1269 1249 1218 1215 1258 1183 1238 1224 1276 1238 1216 1148 1141 1144 1158 1159 1140 1203 1234 1224 1269 1147 1167 1192 1225 1160 1175 1169 1266 1204 1175 1227 1173 1211 1202 1143 1201 1170 1171 1255 1246 1222 1211 1132 1184 1160 1163 1158 1107 1197 1202 1143 1243 1137 1212 1182 1156 1166 1213 1232 1217 1229 1170 1203 1172 1175 1246 1214 1185 1185 1280 1136 1201 1170 1210 1164 1180 1205 1272 1156 1104 1202 1187 1190 1162 1207 1186 1184 1289 1187 1165 1156 1144 1125 1250 1222 1215 1224 1204 1247 1176 1213 1226 1270 1237 1336 1323 1295 1384 1445 1468 1431 1459 1497 1540 1510 1544 1524 1612 1554 1417 1518 1441 1484 1376 1415 1396 1443 1360 1359 1317 1347 1352 1378 1335 1360 1331 1303 1429 1285 1338 1341 1417 1356 1437 1353 1380 1410 1412 1360 1425 1516 1457 1554 1527 1489 1613 1672 1661 1901 1849 1830 2014 2074 2072 2293 2524 2763 2892 3235 3594 4081 4820 5534 6332 7671 9236 11473 14125 17900 23015 29549 33330 33088 27079 20836 18840 18681 17777 14397 9708 6420 4517 3701 3046 2821 2422 2290 2084 1966 1863 1790 1692 1678 
1594 1564 1529 1448 1395 1441 1498 1455 1417 1441 1394 1403 1384 1394 1352 1303 1333 1309 1321 1279 1271 1318 1254 1327 1285 1258 1198 1261 1278 1294 1196 1197 1174 1202 1253 1179 1211 1214 1233 1231 1283 1273 1231 1340 1337 1360 1363 1362 1396 1431 1401 1420 1445 1465 1394 1373 1325 1298 1284 1235 1245 1276 1199 1192 1156 1157 1237 1179 1148 1088 1159 1145 1172 1160 1075 1136 1159 1134 1135 1158 1143 1194 1114 1122 1081 1131 1077 1119 1091 1061 1091 1113 1159 1037 1109 1065 1105 1117 1073 1134 1090 1073 1101 1094 1043 1039 1121 1075 1125 1082 1023 1045 1042 1133 1115 1070 1014 1019 1069 998 1116 983 1030 1053 1024 1022 1063 1058 1018 1089 1034 1040 1041 1099 1002 1066 1008 1048 978 1110 1041 1032 1064 1032 1047 1086 1049 1076 1088 1003 1041 1056 1099 1101 1084 1074 1110 1095 1106 1058 1145 1141 1113 1142 1177 1087 1096 1122 1108 1097 1076 1036 1041 1020 1045 1116 1056 1040 1030 1053 1030 1022 1052 1030 954 1044 1052 1013 1018 1035 1013 1010 1043 989 1009 1027 994 997 1043 962 1013 1033 1044 971 988 989 988 991 1044 1000 967 1005 1016 1037 1011 991 957 965 1031 1053 992 996 1044 969 953 986 1021 986 989 972 939 1001 1043 974 981 1020 1019 957 967 1002 1016 969 949 988 912 983 975 952 990 1015 927 1035 996 1043 933 992 1016 1005 989 920 1060 1003 955 969 1014 961 985 1030 932 987 1021 1034 1019 998 1031 1051 954 1008 946 977 980 993 973 1004 1081 1070 1027 1053 1010 1072 1120 1079 1051 1087 1064 1098 1081 1102 1189 1120 1224 1377 1318 1410 1504 1582 1717 1620 1629 1555 1488 1500 1486 1385 1416 1364 1329 1200 1227 1226 1149 1208 1182 1181 1083 1164 1187 1191 1186 1178 1154 1166 1200 1227 1249 1292 1205 1329 1246 1279 1353 1384 1390 1364 1287 1281 1338 1325 1273 1220 1303 1157 1210 1195 1148 1170 1176 1259 1163 1127 1185 1174 1123 1107 1146 1196 1065 1077 1128 1119 1200 1164 1144 1166 1248 1154 1154 1115 1122 1112 1136 1086 1155 1273 1124 1190 1151 1071 1154 1074 1167 1134 1114 1082 1077 1105 1022 1070 1051 1053 1015 980 1021 1005 997 1009 1021 952 1012 1061 1032 1008 1006 1019 1006 947 965 994 1056 907 970 981 955 955 993 1011 1002 951 1013 1017 976 984 889 984 964 970 984 931 933 933 932 916 945 946 944 930 921 924 907 917 907 1024 953 904 927 937 921 950 948 975 869 926 861 946 915 886 891 923 895 879 825 899 878 919 896 879 860 832 928 911 864 882 921 888 892 879 846 963 919 952 893 910 913 884 954 824 884 879 891 860 879 917 887 928 857 933 857 927 881 875 844 873 910 881 840 887 854 899 860 909 857 885 835 854 887 896 932 893 896 865 879 899 878 887 844 843 864 850 800 869 905 857 894 897 928 873 841 822 886 849 828 854 922 882 855 825 817 858 901 863 840 894 886 902 862 842 877 854 845 870 823 864 892 890 866 854 860 917 871 888 863 900 860 794 866 912 935 884 827 914 891 924 964 899 892 890 884 907 851 876 924 944 996 974 1058 1011 1002 1055 1089 1091 1147 1209 1167 1237 1157 1164 1107 1082 1046 1075 1041 1022 982 979 956 953 881 831 861 889 894 834 901 873 839 878 895 891 954 916 958 959 908 969 913 950 835 887 910 902 948 909 875 877 905 856 900 926 872 843 854 829 836 857 904 800 817 899 894 871 778 834 821 873 878 799 877 853 847 847 874 902 901 864 845 835 897 850 833 810 804 850 876 897 905 887 860 858 899 887 834 889 863 893 830 914 925 927 909 885 965 940 947 907 911 916 873 885 847 923 846 836 850 856 804 784 847 862 824 844 881 837 813 818 848 887 887 853 808 804 853 840 862 919 837 820 808 905 838 861 857 817 800 841 871 880 874 839 889 855 844 875 874 872 848 881 896 831 822 833 840 876 858 847 854 841 841 930 977 950 939 940 956 932 952 959 1014 992 967 984 973 950 984 
1008 997 994 991 1006 1012 1063 1010 1091 1115 1160 1105 1179 1198 1202 1222 1282 1314 1311 1389 1377 1405 1312 1393 1377 1416 1385 1349 1347 1410 1391 1327 1442 1382 1349 1386 1397 1445 1386 1474 1458 1450 1414 1420 1448 1469 1521 1501 1487 1537 1531 1456 1594 1529 1635 1574 1574 1611 1675 1568 1634 1589 1642 1717 1679 1708 1737 1659 1745 1763 1717 1730 1847 1782 1803 1864 1952 2040 2017 2101 2098 2067 2177 2236 2309 2344 2535 2521 2625 2722 2983 2906 3164 3413 3643 3771 4111 4374 4700 5170 5606 5920 6519 6981 7807 8535 9324 10317 11260 12671 13820 15930 17757 19828 22311 25843 29316 33738 38285 43955 50089 56440 62996 69428 74307 77322 77919 76144 73310 68798 65125 61819 58419 55442 51345 46303 41361 35801 31152 26874 23004 19856 17349 15284 13441 12150 10675 9276 8485 7737 6932 6383 5863 5307 4915 4516 4192 4020 3706 3556 3398 3187 3064 3015 2861 2714 2625 2473 2409 2405 2286 2247 2259 2141 2098 2051 2054 1956 1979 1973 1864 1824 1795 1720 1800 1753 1688 1682 1568 1642 1570 1530 1523 1570 1583 1501 1501 1572 1478 1499 1405 1369 1423 1472 1457 1392 1361 1440 1305 1326 1352 1329 1302 1309 1278 1278 1295 1284 1252 1222 1234 1274 1212 1236 1183 1202 1161 1198 1265 1141 1123 1131 1172 1237 1158 1115 1172 1226 1122 1127 1172 1135 1089 1077 1100 1073 1033 1042 1064 1088 1121 1092 1095 1104 1061 1058 1049 1057 1040 1007 1050 1014 1042 1026 979 1082 1040 963 953 965 999 1017 1014 1025 907 935 1023 983 982 990 959 1025 939 897 917 925 998 984 948 964 936 1010 921 922 883 982 930 872 912 955 922 924 937 894 982 964 934 911 908 928 981 956 985 959 972 990 946 962 1004 967 1025 1085 1038 1068 1072 1065 1095 1119 1100 1065 1160 1138 1098 1134 1032 1072 1072 1024 1033 1015 1016 996 945 937 909 961 984 903 908 955 880 893 866 874 903 877 892 865 942 895 904 892 900 887 914 865 896 841 876 916 940 936 934 987 912 945 959 947 980 979 997 1050 1052 1105 1066 1156 1137 1118 1094 1225 1177 1199 1180 1166 1105 1174 1144 1095 1112 1066 1063 1089 1029 1004 1026 919 991 944 916 893 879 903 949 953 879 934 917 878 875 872 918 923 910 897 866 808 857 827 816 837 814 820 768 802 777 731 818 759 782 808 815 797 724 792 745 759 714 787 777 749 758 865 767 707 810 772 737 739 748 727 764 787 753 761 763 766 766 765 730 828 789 862 801 816 795 797 832 888 888 821 772 802 848 831 795 883 789 794 828 761 784 797 810 764 785 774 738 728 736 745 773 733 753 731 742 675 736 741 721 728 726 731 726 733 761 741 734 711 762 716 702 726 742 749 688 726 758 775 728 746 709 694 687 694 639 724 727 777 671 693 747 667 711 743 738 741 730 723 756 713 721 680 675 718 708 704 745 728 738 675 686 738 708 758 711 702 756 793 749 717 792 693 756 763 775 821 840 877 932 934 1009 1090 1302 1464 1448 1383 1347 1208 1126 1163 1175 1169 1176 1209 1098 1067 1056 1049 977 964 917 1034 963 1071 1236 1316 1477 1628 1928 2090 2115 1943 1715 1525 1332 1346 1373 1375 1474 1490 1406 1238 1076 994 885 851 813 804 834 780 796 713 717 689 698 803 719 735 653 718 690 617 692 702 687 671 684 742 695 720 714 716 688 701 725 681 725 642 709 713 600 680 659 648 601 642 657 682 629 689 638 685 601 635 662 665 669 666 645 682 641 663 644 672 698 669 639 636 637 697 677 603 616 669 648 619 663 621 608 678 672 663 640 667 655 683 638 631 623 626 655 598 611 622 589 642 626 636 642 668 579 639 588 667 638 685 649 609 605 671 625 608 630 622 633 627 630 629 677 647 652 623 646 622 629 621 583 682 656 630 662 659 611 596 593 590 604 585 568 676 657 605 643 638 657 598 607 597 645 578 620 590 587 635 590 644 625 601 596 656 648 612 635 653 663 646 631 631 684 646 
706 664 695 714 652 657 705 702 600 691 693 708 648 694 638 651 669 643 640 705 651 728 638 611 610 578 697 729 662 686 667 690 640 615 643 638 599 665 609 635 619 614 595 582 623 612 578 614 617 587 608 586 596 564 608 600 625 597 554 592 622 602 588 624 581 589 664 610 586 659 583 598 605 607 587 584 610 600 589 593 587 605 598 581 576 580 607 615 593 641 560 585 614 619 594 593 562 598 585 551 563 605 581 644 568 578 613 616 573 609 634 601 600 593 607 606 610 596 595 624 649 631 614 642 588 592 630 664 623 610 587 598 571 580 582 595 581 604 589 564 544 557 581 593 560 608 570 573 583 602 587 574 576 586 609 588 609 593 550 588 574 577 595 576 548 557 559 553 583 514 514 602 506 564 564 619 581 578 595 580 581 524 579 596 540 588 588 540 543 557 586 578 544 573 574 581 539 572 573 607 579 567 547 540 589 619 584 542 551 548 548 571 591 574 518 525 534 534 542 547 573 548 549 521 572 560 510 569 550 589 558 547 584 552 564 548 538 591 539 585 564 598 557 566 583 551 532 581 593 497 553 621 590 565 589 593 580 570 515 565 531 553 542 583 576 545 583 530 534 558 580 548 553 580 582 541 564 541 547 549 532 566 535 536 543 560 536 550 566 530 455 525 530 550 578 533 585 576 562 525 521 542 522 575 548 529 603 522 515 510 522 571 582 521 548 577 575 588 570 589 593 604 577 599 584 612 579 615 621 577 625 579 574 551 546 592 578 558 602 567 566 559 564 551 559 534 553 613 568 498 536 504 531 544 493 563 518 527 538 539 551 534 553 529 602 533 530 542 528 541 582 586 541 512 602 540 504 507 557 516 552 527 548 539 512 504 506 539 529 527 530 552 522 516 540 510 544 550 521 556 578 517 594 520 576 490 582 563 543 588 603 570 558 561 580 521 533 542 604 512 573 549 551 548 542 546 545 536 541 572 523 600 580 577 570 559 575 561 607 591 545 601 625 571 639 647 612 617 655 690 756 864 895 957 1087 1232 1266 1418 1328 1237 1165 1057 1027 1008 944 929 1025 1043 1038 989 977 851 897 739 761 670 684 671 660 615 601 607 600 575 602 594 608 576 584 619 577 564 630 665 667 687 741 719 758 768 786 873 915 859 870 846 791 737 729 713 667 752 747 739 689 704 704 678 604 643 588 583 520 546 567 578 577 570 579 607 616 597 575 572 587 595 596 624 568 598 580 598 600 624 624 596 520 572 602 560 529 490 567 612 596 545 578 558 522 543 574 578 591 522 488 528 524 555 509 554 525 525 495 533 550 487 511 518 508 547 528 523 498 547 512 497 542 545 549 503 523 538 559 524 559 523 541 553 543 535 525 489 564 587 541 530 575 501 533 535 530 518 495 565 566 544 530 540 510 548 527 530 508 523 527 562 516 529 473 543 553 498 516 489 517 572 557 509 527 533 510 586 546 544 527 583 567 552 503 563 548 517 534 531 555 542 563 553 555 587 553 559 579 625 600 602 624 574 623 694 643 733 749 841 893 1000 974 1011 925 848 760 725 720 702 706 741 810 784 828 823 761 732 678 668 670 593 634 647 621 577 675 640 644 660 671 633 634 608 601 631 627 642 624 566 620 617 632 594 651 643 657 648 620 585 593 609 667 537 591 590 589 609 642 607 591 591 555 544 622 571 607 585 554 594 595 612 585 584 572 632 595 535 547 559 570 562 542 537 532 567 541 592 568 567 586 527 555 591 480 541 506 573 544 584 572 512 562 536 560 533 521 574 478 466 550 508 505 513 525 497 541 543 530 575 558 522 497 519 488 574 571 476 516 537 479 514 503 482 512 512 489 557 505 476 512 537 493 476 509 507 505 517 539 522 495 473 509 518 502 501 528 511 555 525 512 502 528 483 516 522 501 521 496 545 503 519 505 504 509 480 536 536 508 518 551 508 497 538 509 497 501 525 499 532 484 508 504 518 497 513 531 492 477 530 527 496 553 536 513 486 465 519 458 446 490 
467 497 482 499 506 494 531 502 509 487 480 482 485 485 508 468 475 489 473 508 504 503 494 486 490 483 474 474 528 482 504 497 491 477 504 498 488 484 505 472 502 426 514 487 506 540 519 488 486 524 457 473 484 466 501 469 498 503 528 430 467 497 481 509 505 525 500 457 495 493 520 492 459 482 512 487 485 467 452 474 481 431 491 466 492 505 515 512 467 477 489 565 476 490 482 488 468 482 507 460 494 511 514 470 401 436 498 462 462 471 480 496 447 494 497 449 497 438 465 466 460 468 472 483 468 465 460 496 494 460 479 472 477 476 484 487 470 473 461 439 466 471 492 472 463 438 409 477 507 444 478 440 467 449 446 461 483 456 494 462 462 510 459 527 474 469 477 480 467 464 444 505 435 459 463 528 525 509 521 510 486 564 536 479 514 480 498 499 501 508 436 485 464 522 519 500 524 512 462 459 470 492 446 477 450 468 469 465 466 426 475 472 472 489 449 446 460 471 462 455 468 440 484 497 431 471 445 492 500 465 464 462 466 449 524 482 471 427 489 486 497 461 424 462 482 417 436 494 470 451 483 446 446 460 491 470 465 496 449 494 494 451 447 477 451 464 463 443 430 477 508 416 476 448 445 486 471 449 478 447 473 470 423 476 457 452 459 454 552 449 468 467 444 441 435 419 436 476 433 424 509 416 459 434 459 462 491 457 466 456 448 460 468 448 439 437 449 425 447 443 474 450 491 417 444 405 467 454 461 459 466 477 440 448 480 468 422 404 464 418 493 464 453 472 460 501 461 447 462 466 432 453 419 412 450 427 415 470 450 454 458 413 411 477 476 470 465 497 437 437 498 472 492 435 494 458 453 473 459 443 436 467 438 392 419 445 448 435 436 453 406 502 486 503 427 432 482 425 451 433 432 446 461 421 451 413 479 446 419 457 429 458 450 449 434 433 491 481 450 465 428 447 465 464 458 426 416 484 479 435 438 425 454 431 483 455 460 451 462 441 443 460 443 430 444 457 444 432 471 436 414 474 452 437 462 457 406 459 465 449 450 452 441 442 456 479 412 440 474 424 435 432 429 429 449 450 456 439 456 464 431 458 460 409 434 443 451 442 438 426 476 448 409 435 445 415 422 434 398 479 465 477 425 442 460 441 423 447 405 477 406 443 414 440 474 450 461 440 440 479 418 474 511 465 472 460 458 454 472 466 513 520 498 511 501 576 509 532 473 479 487 517 499 452 493 487 474 513 444 493 478 485 484 472 484 483 461 497 494 460 424 467 460 425 448 473 413 430 426 440 417 463 444 445 421 473 423 421 449 433 485 441 455 424 460 460 399 424 455 446 455 437 444 406 453 443 456 429 436 459 443 453 438 469 452 432 420 462 403 471 504 465 440 493 478 469 454 475 446 446 466 526 481 436 422 446 508 456 429 471 425 450 405 431 446 469 460 459 455 425 486 465 510 438 405 430 460 445 423 413 434 443 430 439 417 394 471 472 458 449 445 428 431 457 406 458 452 434 431 432 426 446 445 478 439 399 439 434 418 381 423 441 470 431 421 459 412 427 440 463 392 419 431 407 437 457 431 407 412 430 423 439 459 454 419 451 430 465 452 445 422 435 404 441 416 448 420 442 464 419 431 431 439 443 439 457 423 467 420 413 395 421 386 456 418 434 438 424 424 442 446 448 426 425 436 464 433 419 464 416 440 418 440 444 399 433 456 504 446 477 459 451 461 451 426 437 472 434 449 445 453 430 425 428 450 453 461 481 423 442 453 433 445 430 403 413 454 439 414 465 397 416 430 423 404 379 419 424 413 428 400 408 429 475 431 425 399 453 409 353 421 417 423 402 425 401 439 383 405 412 417 410 409 420 425 462 430 423 489 435 420 422 420 412 430 393 420 414 390 409 454 402 422 392 419 385 433 428 438 401 419 441 402 405 417 444 438 417 395 428 380 450 415 403 437 425 406 409 444 405 398 387 427 387 396 379 436 422 428 441 422 416 403 438 443 419 425 438 
428 397 427 428 408 424 428 415 429 374 474 439 449 427 415 428 419 407 432 419 431 455 439 446 426 438 449 446 473 430 419 451 417 404 469 453 491 457 456 472 481 482 460 492 488 480 464 507 471 508 511 530 534 531 538 553 569 545 546 578 597 602 588 606 592 650 655 658 642 687 679 727 676 676 706 753 753 737 759 716 696 754 790 771 778 797 807 757 734 805 724 691 764 721 767 794 746 678 754 723 717 662 691 675 634 616 655 638 651 633 676 614 537 578 565 569 537 606 554 533 565 520 568 535 461 517 482 538 473 487 529 490 474 461 440 451 478 455 419 438 434 420 445 460 430 425 440 488 455 457 487 423 422 464 425 448 441 429 405 435 457 436 455 446 400 412 440 426 498 413 428 393 423 401 412 379 435 449 494 475 423 444 440 487 488 526 522 487 506 532 497 467 465 472 470 455 469 409 484 503 469 437 459 483 461 467 481 491 473 517 441 419 458 469 480 460 473 426 466 487 491 506 511 543 602 662 651 651 563 558 506 503 507 461 427 474 462 512 463 522 498 481 532 534 493 472 454 456 426 446 440 452 394 410 412 434 423 416 361 378 390 416 391 390 391 384 366 394 401 360 393 424 408 346 384 404 381 397 383 385 398 415 392 390 388 405 395 383 399 397 385 398 386 371 386 386 378 356 369 392 351 411 383 468 365 416 392 374 420 380 395 370 403 349 378 371 410 380 357 387 393 374 405 398 388 398 376 384 401 356 381 400 389 384 395 344 397 406 370 402 400 433 430 376 369 403 399 384 382 379 389 430 370 402 372 406 424 374 401 424 466 365 412 415 403 409 412 414 384 393 388 390 407 398 425 415 414 424 401 369 422 384 392 396 373 355 380 421 359 356 408 443 369 428 423 389 370 372 367 371 400 372 402 382 363 357 360 428 391 357 379 382 392 365 380 366 392 385 366 389 430 376 402 389 390 413 363 375 414 358 382 392 363 393 423 395 439 412 373 364 392 402 351 379 424 410 370 356 378 372 369 369 405 393 419 367 404 383 387 387 376 375 390 343 382 364 411 362 372 388 345 351 392 396 388 349 372 391 396 388 347 375 350 379 375 381 373 397 379 392 342 373 378 325 400 352 388 349 364 389 345 358 382 360 379 355 359 348 369 412 374 348 429 361 399 353 391 368 385 357 376 358 355 383 354 346 351 373 402 364 361 435 375 405 391 423 381 359 351 378 362 406 368 335 404 334 384 380 356 359 365 397 398 348 342 396 365 357 354 365 388 365 342 404 378 372 333 344 362 397 393 346 333 384 387 365 357 394 347 + +
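For orientation (this note and the sketch below are not part of the patch): the new ACZCTS_5-60_181.xrdml fixture records the angular range only as a start and an end position together with a flat list of counts, so a reader has to rebuild the 2Theta axis itself. A minimal sketch of such a reader follows; the element names (dataPoints, positions, counts) and the namespace URL follow the usual PANalytical XRDML layout and are assumptions, not something verified against this file:

    import numpy as np
    import xml.etree.ElementTree as ET

    NS = {"x": "http://www.xrdml.com/XRDMeasurement/1.5"}  # assumed XRDML namespace

    def read_xrdml_scan(path):
        """Return (two_theta, counts) for the first scan in an .xrdml file."""
        root = ET.parse(path).getroot()
        data_points = root.find(".//x:dataPoints", NS)
        counts = np.array(data_points.find("x:counts", NS).text.split(), dtype=float)
        pos = data_points.find("x:positions[@axis='2Theta']", NS)
        start = float(pos.find("x:startPosition", NS).text)
        end = float(pos.find("x:endPosition", NS).text)
        # Evenly spaced 2Theta axis spanning the recorded start/end positions.
        two_theta = np.linspace(start, end, counts.size)
        return two_theta, counts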
diff --git a/tests/data/eln_mapper/eln.yaml b/tests/data/eln_mapper/eln.yaml new file mode 100644 index 000000000..6a6884017 --- /dev/null +++ b/tests/data/eln_mapper/eln.yaml @@ -0,0 +1,103 @@ +Data: + '@signal': null + data: + value: null + unit: null +Instrument: + Beam: + distance: + value: null + unit: null + incident_energy: + value: null + unit: null + incident_energy_spread: + value: null + unit: null + incident_polarization: + value: null + unit: null + Electronanalyser: + Collectioncolumn: + contrast_aperture: null + field_aperture: null + mode: null + projection: null + scheme: null + Detector: + Data: + '@signal': null + raw: null + amplifier_type: null + detector_type: null + Energydispersion: + energy_scan_mode: null + entrance_slit: null + exit_slit: null + pass_energy: + value: null + unit: null + scheme: null + description: null + energy_resolution: + value: null + unit: null + fast_axes: null + slow_axes: null + Manipulator: + drain_current: + value: null + unit: null + sample_bias: + value: null + unit: null + sample_temperature: + value: null + unit: null + Source: + name: null + probe: null + type: null + energy_resolution: + value: null + unit: null +Process: + angular_calibration: + applied: null + calibrated_axis: null + energy_calibration: + applied: null + calibrated_axis: null + momentum_calibration: + applied: null + calibrated_axis: null + spatial_calibration: + applied: null + calibrated_axis: null +Sample: + atom_types: null + bias: + value: null + unit: null + chemical_formula: null + gas_pressure: + value: null + unit: null + name: null + preparation_date: null + preparation_description: null + sample_history: null + situation: null + temperature: + value: null + unit: null +User: + address: null + affiliation: null + email: null + name: null + orcid: null +definition: + '@version': null +start_time: null +title: null diff --git a/tests/data/eln_mapper/mpes.scheme.archive.yaml b/tests/data/eln_mapper/mpes.scheme.archive.yaml new file mode 100644 index 000000000..0f704ced9 --- /dev/null +++ b/tests/data/eln_mapper/mpes.scheme.archive.yaml @@ -0,0 +1,537 @@ +definitions: + name: + sections: + mpes: + base_sections: + - nomad.datamodel.metainfo.eln.NexusDataConverter + - nomad.datamodel.data.EntryData + m_annotations: + template: + reader: + nxdl: NX.nxdl + eln: + hide: [] + quantities: + title: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + start_time: + type: Datetime + m_annotations: + eln: + component: DateTimeEditQuantity + defaultDisplayUnit: + description: ' Datetime of the start of the measurement. ' + definition: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + User: + section: + m_annotations: + eln: + overview: true + quantities: + name: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Name of the user. ' + affiliation: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Name of the affiliation of the user at the point in + time when the experiment was performed. ' + address: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Full address (street, street number, ZIP, city, country) + of the user''s affiliation. 
' + email: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Email address of the user. ' + orcid: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Author ID defined by https://orcid.org/. ' + Instrument: + section: + m_annotations: + eln: + overview: true + quantities: + energy_resolution: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + Source: + section: + m_annotations: + eln: + overview: true + quantities: + type: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + name: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + probe: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Type of probe. In photoemission it''s always + photons, so the full NIAC list is restricted. ' + Beam: + section: + m_annotations: + eln: + overview: true + quantities: + distance: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' Distance of the point of evaluation of the beam + from the sample surface. ' + incident_energy: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + incident_energy_spread: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + incident_polarization: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + Electronanalyser: + section: + m_annotations: + eln: + overview: true + quantities: + description: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + energy_resolution: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' Energy resolution of the analyser with the current + setting. May be linked from a NXcalibration. ' + fast_axes: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + slow_axes: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + Collectioncolumn: + section: + m_annotations: + eln: + overview: true + quantities: + scheme: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Scheme of the electron collection column. 
' + mode: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + projection: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + Field_aperture: + section: + m_annotations: + eln: + overview: true + Contrast_aperture: + section: + m_annotations: + eln: + overview: true + Energydispersion: + section: + m_annotations: + eln: + overview: true + quantities: + scheme: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + pass_energy: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + energy_scan_mode: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + Entrance_slit: + section: + m_annotations: + eln: + overview: true + Exit_slit: + section: + m_annotations: + eln: + overview: true + Detector: + section: + m_annotations: + eln: + overview: true + quantities: + amplifier_type: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Type of electron amplifier in the first + amplification step. ' + detector_type: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' Description of the detector type. ' + sub_sections: + Data: + section: + m_annotations: + eln: + overview: true + quantities: + raw: + type: np.float64 + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' Raw data before calibration. ' + Manipulator: + section: + m_annotations: + eln: + overview: true + quantities: + sample_temperature: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + drain_current: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + sample_bias: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + Process: + section: + m_annotations: + eln: + overview: true + sub_sections: + Energy_calibration: + section: + m_annotations: + eln: + overview: true + quantities: + applied: + type: bool + m_annotations: + eln: + component: BoolEditQuantity + defaultDisplayUnit: + description: ' Has an energy calibration been applied? ' + calibrated_axis: + type: np.float64 + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' This is the calibrated energy axis to be used + for data plotting. ' + Angular_calibration: + section: + m_annotations: + eln: + overview: true + quantities: + applied: + type: bool + m_annotations: + eln: + component: BoolEditQuantity + defaultDisplayUnit: + description: ' Has an angular calibration been applied? ' + calibrated_axis: + type: np.float64 + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' This is the calibrated angular axis to be used + for data plotting. ' + Spatial_calibration: + section: + m_annotations: + eln: + overview: true + quantities: + applied: + type: bool + m_annotations: + eln: + component: BoolEditQuantity + defaultDisplayUnit: + description: ' Has an spatial calibration been applied? 
' + calibrated_axis: + type: np.float64 + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' This is the calibrated spatial axis to be used + for data plotting. ' + Momentum_calibration: + section: + m_annotations: + eln: + overview: true + quantities: + applied: + type: bool + m_annotations: + eln: + component: BoolEditQuantity + defaultDisplayUnit: + description: ' Has an momentum calibration been applied? ' + calibrated_axis: + type: np.float64 + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' This is the momentum axis to be used for data + plotting. ' + Sample: + section: + m_annotations: + eln: + overview: true + quantities: + name: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + chemical_formula: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' The chemical formula of the sample. For mixtures use + the NXsample_component group in NXsample instead. ' + atom_types: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: ' List of comma-separated elements from the periodic + table that are contained in the sample. If the sample substance + has multiple components, all elements from each component must be + included in `atom_types`. ' + preparation_date: + type: Datetime + m_annotations: + eln: + component: DateTimeEditQuantity + defaultDisplayUnit: + description: ' Date of preparation of the sample for the XPS experiment + (i.e. cleaving, last annealing). ' + temperature: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' In the case of a fixed temperature measurement this + is the scalar temperature of the sample. In the case of an experiment + in which the temperature is changed and recoded, this is an array + of length m of temperatures. This should be a link to /entry/instrument/manipulator/sample_temperature. ' + situation: + type: str + m_annotations: + eln: + component: StringEditQuantity + defaultDisplayUnit: + description: '' + gas_pressure: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: '' + sub_sections: + Sample_history: + section: + m_annotations: + eln: + overview: true + Preparation_description: + section: + m_annotations: + eln: + overview: true + Data: + section: + m_annotations: + eln: + overview: true + quantities: + data: + type: np.float64 + unit: '' + value: + m_annotations: + eln: + component: NumberEditQuantity + defaultDisplayUnit: + description: ' Represents a measure of one- or more-dimensional photoemission + counts, where the varied axis may be for example energy, momentum, + spatial coordinate, pump-probe delay, spin index, temperature, etc. + The axes traces should be linked to the actual encoder position + in NXinstrument or calibrated axes in NXprocess. ' diff --git a/tests/data/nexus/NXtest2.nxdl.xml b/tests/data/nexus/NXtest2.nxdl.xml new file mode 100644 index 000000000..7b33b2165 --- /dev/null +++ b/tests/data/nexus/NXtest2.nxdl.xml @@ -0,0 +1,455 @@ + + + + + + + Characterization of a sample during a session on an electron microscope. + + + + + + + + Metadata and numerical data of the microscope and the lab in which it stands. + + + + + + Given name of the microscope at the hosting institution. This is an alias. 
+ Examples could be NionHermes, Titan, JEOL, Gemini, etc. + + + + + Location of the lab or place where the instrument is installed. + Using GEOREF is preferred. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + If the lens is described at least one of the fields + voltage, current, or value should be defined. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Description of the type of the detector. + + Electron microscopes have typically multiple detectors. + Different technologies are in use like CCD, scintillator, + direct electron, CMOS, or image plate to name but a few. + + + + Instrument-specific alias/name + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A container for storing a set of NXevent_data_em instances. + + + + + \ No newline at end of file diff --git a/tests/data/nexus/Ref_nexus_test.log b/tests/data/nexus/Ref_nexus_test.log index 0b9f8bebd..ec7214cc4 100644 --- a/tests/data/nexus/Ref_nexus_test.log +++ b/tests/data/nexus/Ref_nexus_test.log @@ -8,12 +8,13 @@ DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:): DEBUG - - (**required**) :ref:`NXentry` describes the measurement. - - The top-level NeXus group which contains all the data and associated - information that comprise a single measurement. - It is mandatory that there is at least one - group of this type in the NeXus file. + (**required**) :ref:`NXentry` describes the measurement. + + The top-level NeXus group which contains all the data and associated + information that comprise a single measurement. + It is mandatory that there is at least one + group of this type in the NeXus file. + DEBUG - ===== ATTRS (//entry@NX_class) DEBUG - value: NXentry DEBUG - classpath: ['NXentry'] @@ -30,9 +31,9 @@ NXentry.nxdl.xml:/collection_time DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/collection_time): DEBUG - - Time transpired actually collecting data i.e. taking out time when collection was - suspended due to e.g. temperature out of range - + Time transpired actually collecting data i.e. taking out time when collection was + suspended due to e.g. temperature out of range + DEBUG - ===== ATTRS (//entry/collection_time@units) DEBUG - value: s DEBUG - classpath: ['NXentry', 'NX_FLOAT'] @@ -50,34 +51,33 @@ DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/DATA): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/DATA): DEBUG - - The data group - - .. note:: Before the NIAC2016 meeting [#]_, at least one - :ref:`NXdata` group was required in each :ref:`NXentry` group. - At the NIAC2016 meeting, it was decided to make :ref:`NXdata` - an optional group in :ref:`NXentry` groups for data files that - do not use an application definition. - It is recommended strongly that all NeXus data files provide - a NXdata group. - It is permissable to omit the NXdata group only when - defining the default plot is not practical or possible - from the available data. 
- - For example, neutron event data may not have anything that - makes a useful plot without extensive processing. - - Certain application definitions override this decision and - require an :ref:`NXdata` group - in the :ref:`NXentry` group. The ``minOccurs=0`` attribute - in the application definition will indicate the - :ref:`NXdata` group - is optional, otherwise, it is required. - - .. [#] NIAC2016: - https://www.nexusformat.org/NIAC2016.html, - https://github.com/nexusformat/NIAC/issues/16 - - + The data group + + .. note:: Before the NIAC2016 meeting [#]_, at least one + :ref:`NXdata` group was required in each :ref:`NXentry` group. + At the NIAC2016 meeting, it was decided to make :ref:`NXdata` + an optional group in :ref:`NXentry` groups for data files that + do not use an application definition. + It is recommended strongly that all NeXus data files provide + a NXdata group. + It is permissable to omit the NXdata group only when + defining the default plot is not practical or possible + from the available data. + + For example, neutron event data may not have anything that + makes a useful plot without extensive processing. + + Certain application definitions override this decision and + require an :ref:`NXdata` group + in the :ref:`NXentry` group. The ``minOccurs=0`` attribute + in the application definition will indicate the + :ref:`NXdata` group + is optional, otherwise, it is required. + + .. [#] NIAC2016: + https://www.nexusformat.org/NIAC2016.html, + https://github.com/nexusformat/NIAC/issues/16 + DEBUG - documentation (NXdata.nxdl.xml:): DEBUG - :ref:`NXdata` describes the plottable data and related dimension scales. @@ -395,21 +395,21 @@ DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/definition): DEBUG - Official NeXus NXDL schema to which this file conforms. DEBUG - documentation (NXentry.nxdl.xml:/definition): DEBUG - - (alternate use: see same field in :ref:`NXsubentry` for preferred) - - Official NeXus NXDL schema to which this entry conforms which must be - the name of the NXDL file (case sensitive without the file extension) - that the NXDL schema is defined in. - - For example the ``definition`` field for a file that conformed to the - *NXarpes.nxdl.xml* definition must contain the string **NXarpes**. - - This field is provided so that :ref:`NXentry` can be the overlay position - in a NeXus data file for an application definition and its - set of groups, fields, and attributes. - - *It is advised* to use :ref:`NXsubentry`, instead, as the overlay position. - + (alternate use: see same field in :ref:`NXsubentry` for preferred) + + Official NeXus NXDL schema to which this entry conforms which must be + the name of the NXDL file (case sensitive without the file extension) + that the NXDL schema is defined in. + + For example the ``definition`` field for a file that conformed to the + *NXarpes.nxdl.xml* definition must contain the string **NXarpes**. + + This field is provided so that :ref:`NXentry` can be the overlay position + in a NeXus data file for an application definition and its + set of groups, fields, and attributes. + + *It is advised* to use :ref:`NXsubentry`, instead, as the overlay position. 
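As a small illustration of the convention documented above (not taken from this patch; the file name is made up), the definition field is typically written like this with h5py:

    import h5py

    with h5py.File("example.nxs", "w") as nxs:
        entry = nxs.create_group("entry")
        entry.attrs["NX_class"] = "NXentry"
        # Name of the application definition file without the .nxdl.xml extension.
        entry["definition"] = "NXarpes"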
+ DEBUG - ===== FIELD (//entry/duration): DEBUG - value: 7200 DEBUG - classpath: ['NXentry', 'NX_INT'] @@ -417,7 +417,9 @@ DEBUG - classes: NXentry.nxdl.xml:/duration DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/duration): -DEBUG - Duration of measurement +DEBUG - + Duration of measurement + DEBUG - ===== ATTRS (//entry/duration@units) DEBUG - value: s DEBUG - classpath: ['NXentry', 'NX_INT'] @@ -431,7 +433,9 @@ DEBUG - classes: NXentry.nxdl.xml:/end_time DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/end_time): -DEBUG - Ending time of measurement +DEBUG - + Ending time of measurement + DEBUG - ===== FIELD (//entry/entry_identifier): DEBUG - value: b'Run 22118' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -439,7 +443,9 @@ DEBUG - classes: NXentry.nxdl.xml:/entry_identifier DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/entry_identifier): -DEBUG - unique identifier for the measurement, defined by the facility. +DEBUG - + unique identifier for the measurement, defined by the facility. + DEBUG - ===== FIELD (//entry/experiment_identifier): DEBUG - value: b'F-20170538' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -448,10 +454,10 @@ NXentry.nxdl.xml:/experiment_identifier DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/experiment_identifier): DEBUG - - Unique identifier for the experiment, - defined by the facility, - possibly linked to the proposals - + Unique identifier for the experiment, + defined by the facility, + possibly linked to the proposals + DEBUG - ===== GROUP (//entry/instrument [NXarpes::/NXentry/NXinstrument]): DEBUG - classpath: ['NXentry', 'NXinstrument'] DEBUG - classes: @@ -465,15 +471,15 @@ DEBUG - documentation (NXentry.nxdl.xml:/INSTRUMENT): DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:): DEBUG - - Collection of the components of the instrument or beamline. - - Template of instrument descriptions comprising various beamline components. - Each component will also be a NeXus group defined by its distance from the - sample. Negative distances represent beamline components that are before the - sample while positive distances represent components that are after the sample. - This device allows the unique identification of beamline components in a way - that is valid for both reactor and pulsed instrumentation. - + Collection of the components of the instrument or beamline. + + Template of instrument descriptions comprising various beamline components. + Each component will also be a NeXus group defined by its distance from the + sample. Negative distances represent beamline components that are before the + sample while positive distances represent components that are after the sample. + This device allows the unique identification of beamline components in a way + that is valid for both reactor and pulsed instrumentation. + DEBUG - ===== ATTRS (//entry/instrument@NX_class) DEBUG - value: NXinstrument DEBUG - classpath: ['NXentry', 'NXinstrument'] @@ -496,8 +502,8 @@ DEBUG - documentation (NXinstrument.nxdl.xml:/DETECTOR): DEBUG - DEBUG - documentation (NXdetector.nxdl.xml:): DEBUG - - A detector, detector bank, or multidetector. - + A detector, detector bank, or multidetector. 
+ DEBUG - ===== ATTRS (//entry/instrument/analyser@NX_class) DEBUG - value: NXdetector DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector'] @@ -527,12 +533,19 @@ DEBUG - -> decimated DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/analyser/acquisition_mode): DEBUG - DEBUG - documentation (NXdetector.nxdl.xml:/acquisition_mode): -DEBUG - The acquisition mode of the detector. +DEBUG - + The acquisition mode of the detector. + DEBUG - ===== FIELD (//entry/instrument/analyser/amplifier_type): DEBUG - value: b'MCP' -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_CHAR'] +DEBUG - classes: +NXdetector.nxdl.xml:/amplifier_type +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/amplifier_type): DEBUG - + Type of electron amplifier, MCP, channeltron, etc. + DEBUG - ===== FIELD (//entry/instrument/analyser/angles): DEBUG - value: [-1.96735314 -1.91500657 -1.86266001 -1.81031344 -1.75796688 -1.70562031 ... DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_NUMBER'] @@ -574,29 +587,29 @@ DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/analyser/data): DEBUG - DEBUG - documentation (NXdetector.nxdl.xml:/data): DEBUG - - Data values from the detector. The rank and dimension ordering should follow a principle of - slowest to fastest measurement axes and may be explicitly specified in application definitions. - - Mechanical scanning of objects (e.g. sample position/angle, incident beam energy, etc) tends to be - the slowest part of an experiment and so any such scan axes should be allocated to the first dimensions - of the array. Note that in some cases it may be useful to represent a 2D set of scan points as a single - scan-axis in the data array, especially if the scan pattern doesn't fit a rectangular array nicely. - Repetition of an experiment in a time series tends to be used similar to a slow scan axis - and so will often be in the first dimension of the data array. - - The next fastest axes are typically the readout of the detector. A point detector will not add any dimensions - (as it is just a single value per scan point) to the data array, a strip detector will add one dimension, an - imaging detector will add two dimensions (e.g. X, Y axes) and detectors outputting higher dimensional data - will add the corresponding number of dimensions. Note that the detector dimensions don't necessarily have to - be written in order of the actual readout speeds - the slowest to fastest rule principle is only a guide. - - Finally, detectors that operate in a time-of-flight mode, such as a neutron spectrometer or a silicon drift - detector (used for X-ray fluorescence) tend to have their dimension(s) added to the last dimensions in the data array. - - The type of each dimension should should follow the order of scan points, detector pixels, - then time-of-flight (i.e. spectroscopy, spectrometry). The rank and dimension sizes (see symbol list) - shown here are merely illustrative of coordination between related datasets. - + Data values from the detector. The rank and dimension ordering should follow a principle of + slowest to fastest measurement axes and may be explicitly specified in application definitions. + + Mechanical scanning of objects (e.g. sample position/angle, incident beam energy, etc) tends to be + the slowest part of an experiment and so any such scan axes should be allocated to the first dimensions + of the array. 
Note that in some cases it may be useful to represent a 2D set of scan points as a single + scan-axis in the data array, especially if the scan pattern doesn't fit a rectangular array nicely. + Repetition of an experiment in a time series tends to be used similar to a slow scan axis + and so will often be in the first dimension of the data array. + + The next fastest axes are typically the readout of the detector. A point detector will not add any dimensions + (as it is just a single value per scan point) to the data array, a strip detector will add one dimension, an + imaging detector will add two dimensions (e.g. X, Y axes) and detectors outputting higher dimensional data + will add the corresponding number of dimensions. Note that the detector dimensions don't necessarily have to + be written in order of the actual readout speeds - the slowest to fastest rule principle is only a guide. + + Finally, detectors that operate in a time-of-flight mode, such as a neutron spectrometer or a silicon drift + detector (used for X-ray fluorescence) tend to have their dimension(s) added to the last dimensions in the data array. + + The type of each dimension should should follow the order of scan points, detector pixels, + then time-of-flight (i.e. spectroscopy, spectrometry). The rank and dimension sizes (see symbol list) + shown here are merely illustrative of coordination between related datasets. + DEBUG - ===== ATTRS (//entry/instrument/analyser/data@target) DEBUG - value: /entry/instrument/analyser/data DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_NUMBER'] @@ -630,9 +643,14 @@ DEBUG - NOT IN SCHEMA DEBUG - DEBUG - ===== FIELD (//entry/instrument/analyser/detector_type): DEBUG - value: b'DLD' -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_CHAR'] +DEBUG - classes: +NXdetector.nxdl.xml:/detector_type +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/detector_type): DEBUG - + Description of the detector type, DLD, Phosphor+CCD, CMOS. + DEBUG - ===== FIELD (//entry/instrument/analyser/dispersion_scheme): DEBUG - value: b'Time of flight' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector'] @@ -776,9 +794,14 @@ DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/analyser/region_size): DEBUG - size of rectangular region selected for readout DEBUG - ===== FIELD (//entry/instrument/analyser/sensor_count): DEBUG - value: 4 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_INT'] +DEBUG - classes: +NXdetector.nxdl.xml:/sensor_count +DEBUG - <> +DEBUG - documentation (NXdetector.nxdl.xml:/sensor_count): DEBUG - + Number of imaging sensor chips on the detector. + DEBUG - ===== FIELD (//entry/instrument/analyser/sensor_size): DEBUG - value: [ 80 146] DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXdetector', 'NX_INT'] @@ -821,22 +844,22 @@ DEBUG - documentation (NXinstrument.nxdl.xml:/BEAM): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:): DEBUG - - Properties of the neutron or X-ray beam at a given location. - - This group is intended to be referenced - by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is - especially valuable in storing the results of instrument simulations in which it is useful - to specify the beam profile, time distribution etc. at each beamline component. 
Otherwise, - its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron - scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is - considered as a beamline component and this group may be defined as a subgroup directly inside - :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an - :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). - - Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. - To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred - by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. - + Properties of the neutron or X-ray beam at a given location. + + This group is intended to be referenced + by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is + especially valuable in storing the results of instrument simulations in which it is useful + to specify the beam profile, time distribution etc. at each beamline component. Otherwise, + its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron + scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is + considered as a beamline component and this group may be defined as a subgroup directly inside + :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an + :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). + + Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. + To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred + by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. + DEBUG - ===== ATTRS (//entry/instrument/beam_probe_0@NX_class) DEBUG - value: NXbeam DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -852,7 +875,9 @@ DEBUG - classes: NXbeam.nxdl.xml:/distance DEBUG - <> DEBUG - documentation (NXbeam.nxdl.xml:/distance): -DEBUG - Distance from sample. Note, it is recommended to use NXtransformations instead. +DEBUG - + Distance from sample. Note, it is recommended to use NXtransformations instead. 
+ DEBUG - ===== ATTRS (//entry/instrument/beam_probe_0/distance@units) DEBUG - value: cm DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] @@ -886,14 +911,20 @@ DEBUG - NOT IN SCHEMA DEBUG - DEBUG - ===== FIELD (//entry/instrument/beam_probe_0/pulse_duration): DEBUG - value: 70 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_duration): DEBUG - + FWHM duration of the pulses at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_probe_0/pulse_duration@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - NXbeam.nxdl.xml:/pulse_duration@units [NX_TIME] DEBUG - ===== FIELD (//entry/instrument/beam_probe_0/size_x): DEBUG - value: 500 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -924,22 +955,22 @@ DEBUG - documentation (NXinstrument.nxdl.xml:/BEAM): DEBUG - DEBUG - documentation (NXbeam.nxdl.xml:): DEBUG - - Properties of the neutron or X-ray beam at a given location. - - This group is intended to be referenced - by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is - especially valuable in storing the results of instrument simulations in which it is useful - to specify the beam profile, time distribution etc. at each beamline component. Otherwise, - its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron - scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is - considered as a beamline component and this group may be defined as a subgroup directly inside - :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an - :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). - - Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. - To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred - by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. - + Properties of the neutron or X-ray beam at a given location. + + This group is intended to be referenced + by beamline component groups within the :ref:`NXinstrument` group or by the :ref:`NXsample` group. This group is + especially valuable in storing the results of instrument simulations in which it is useful + to specify the beam profile, time distribution etc. at each beamline component. Otherwise, + its most likely use is in the :ref:`NXsample` group in which it defines the results of the neutron + scattering by the sample, e.g., energy transfer, polarizations. Finally, There are cases where the beam is + considered as a beamline component and this group may be defined as a subgroup directly inside + :ref:`NXinstrument`, in which case it is recommended that the position of the beam is specified by an + :ref:`NXtransformations` group, unless the beam is at the origin (which is the sample). + + Note that incident_wavelength and related fields can be a scalar values or arrays, depending on the use case. 
+ To support these use cases, the explicit dimensionality of these fields is not specified, but it can be inferred + by the presense of and shape of accompanying fields, such as incident_wavelength_weights for a polychromatic beam. + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0@NX_class) DEBUG - value: NXbeam DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -950,14 +981,20 @@ DEBUG - @NX_class [NX_CHAR] DEBUG - DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/average_power): DEBUG - value: 6.21289 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/average_power +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/average_power): DEBUG - + Average power at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0/average_power@units) DEBUG - value: uW -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/average_power +DEBUG - NXbeam.nxdl.xml:/average_power@units [NX_POWER] DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/center_wavelength): DEBUG - value: 800 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -975,7 +1012,9 @@ DEBUG - classes: NXbeam.nxdl.xml:/distance DEBUG - <> DEBUG - documentation (NXbeam.nxdl.xml:/distance): -DEBUG - Distance from sample. Note, it is recommended to use NXtransformations instead. +DEBUG - + Distance from sample. Note, it is recommended to use NXtransformations instead. + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0/distance@units) DEBUG - value: cm DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] @@ -984,14 +1023,20 @@ NXbeam.nxdl.xml:/distance DEBUG - NXbeam.nxdl.xml:/distance@units [NX_LENGTH] DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/fluence): DEBUG - value: 5 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/fluence +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/fluence): DEBUG - + Incident fluence at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0/fluence@units) DEBUG - value: mJ/cm^2 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/fluence +DEBUG - NXbeam.nxdl.xml:/fluence@units [NX_ANY] DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/photon_energy): DEBUG - value: 1.55 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -1019,24 +1064,36 @@ DEBUG - NOT IN SCHEMA DEBUG - DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/pulse_duration): DEBUG - value: 50 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_duration +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_duration): DEBUG - + FWHM duration of the pulses at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0/pulse_duration@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: 
+NXbeam.nxdl.xml:/pulse_duration +DEBUG - NXbeam.nxdl.xml:/pulse_duration@units [NX_TIME] DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/pulse_energy): DEBUG - value: 1.24258 -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_energy +DEBUG - <> +DEBUG - documentation (NXbeam.nxdl.xml:/pulse_energy): DEBUG - + Energy of a single pulse at the diagnostic point + DEBUG - ===== ATTRS (//entry/instrument/beam_pump_0/pulse_energy@units) DEBUG - value: nJ -DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam', 'NX_FLOAT'] +DEBUG - classes: +NXbeam.nxdl.xml:/pulse_energy +DEBUG - NXbeam.nxdl.xml:/pulse_energy@units [NX_ENERGY] DEBUG - ===== FIELD (//entry/instrument/beam_pump_0/size_x): DEBUG - value: 500 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXbeam'] @@ -1059,14 +1116,20 @@ DEBUG - NOT IN SCHEMA DEBUG - DEBUG - ===== FIELD (//entry/instrument/energy_resolution): DEBUG - value: 100 -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/energy_resolution +DEBUG - <> +DEBUG - documentation (NXinstrument.nxdl.xml:/energy_resolution): DEBUG - + Energy resolution of the experiment (FWHM or gaussian broadening) + DEBUG - ===== ATTRS (//entry/instrument/energy_resolution@units) DEBUG - value: meV -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/energy_resolution +DEBUG - NXinstrument.nxdl.xml:/energy_resolution@units [NX_ENERGY] DEBUG - ===== GROUP (//entry/instrument/manipulator [NXarpes::/NXentry/NXinstrument/NXpositioner]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXpositioner'] DEBUG - classes: @@ -1272,7 +1335,9 @@ DEBUG - classes: NXinstrument.nxdl.xml:/name DEBUG - <> DEBUG - documentation (NXinstrument.nxdl.xml:/name): -DEBUG - Name of instrument +DEBUG - + Name of instrument + DEBUG - ===== GROUP (//entry/instrument/source [NXarpes::/NXentry/NXinstrument/NXsource]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] DEBUG - classes: @@ -1285,7 +1350,9 @@ DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:/SOURCE): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:): -DEBUG - The neutron or x-ray storage ring/facility. +DEBUG - + The neutron or x-ray storage ring/facility. 
+ DEBUG - ===== ATTRS (//entry/instrument/source@NX_class) DEBUG - value: NXsource DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -1302,7 +1369,9 @@ DEBUG - classes: NXsource.nxdl.xml:/bunch_distance DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/bunch_distance): -DEBUG - For storage rings, time between bunches +DEBUG - + For storage rings, time between bunches + DEBUG - ===== ATTRS (//entry/instrument/source/bunch_distance@units) DEBUG - value: us DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1316,7 +1385,9 @@ DEBUG - classes: NXsource.nxdl.xml:/bunch_length DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/bunch_length): -DEBUG - For storage rings, temporal length of the bunch +DEBUG - + For storage rings, temporal length of the bunch + DEBUG - ===== ATTRS (//entry/instrument/source/bunch_length@units) DEBUG - value: fs DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1360,7 +1431,9 @@ DEBUG - classes: NXsource.nxdl.xml:/current DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/current): -DEBUG - Accelerator, X-ray tube, or storage ring current +DEBUG - + Accelerator, X-ray tube, or storage ring current + DEBUG - ===== ATTRS (//entry/instrument/source/current@units) DEBUG - value: uA DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1375,10 +1448,10 @@ NXsource.nxdl.xml:/energy DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/energy): DEBUG - - Source energy. - For storage rings, this would be the particle beam energy. - For X-ray tubes, this would be the excitation voltage. - + Source energy. + For storage rings, this would be the particle beam energy. + For X-ray tubes, this would be the excitation voltage. + DEBUG - ===== ATTRS (//entry/instrument/source/energy@units) DEBUG - value: MeV DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1392,7 +1465,9 @@ DEBUG - classes: NXsource.nxdl.xml:/frequency DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/frequency): -DEBUG - Frequency of pulsed source +DEBUG - + Frequency of pulsed source + DEBUG - ===== ATTRS (//entry/instrument/source/frequency@units) DEBUG - value: Hz DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1409,7 +1484,9 @@ DEBUG - enumeration (NXsource.nxdl.xml:/mode): DEBUG - -> Single Bunch DEBUG - -> Multi Bunch DEBUG - documentation (NXsource.nxdl.xml:/mode): -DEBUG - source operating mode +DEBUG - + source operating mode + DEBUG - ===== FIELD (//entry/instrument/source/name): DEBUG - value: b'FLASH' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -1420,7 +1497,9 @@ DEBUG - <> DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/name): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/name): -DEBUG - Name of source +DEBUG - + Name of source + DEBUG - ===== FIELD (//entry/instrument/source/number_of_bunches): DEBUG - value: 500 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_INT'] @@ -1428,7 +1507,9 @@ DEBUG - classes: NXsource.nxdl.xml:/number_of_bunches DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/number_of_bunches): -DEBUG - For storage rings, the number of bunches in use. +DEBUG - + For storage rings, the number of bunches in use. 
+ DEBUG - ===== FIELD (//entry/instrument/source/number_of_bursts): DEBUG - value: 1 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -1455,7 +1536,9 @@ DEBUG - -> proton DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/probe): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/probe): -DEBUG - type of radiation probe (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation probe (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/source/top_up): DEBUG - value: True DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_BOOLEAN'] @@ -1463,7 +1546,9 @@ DEBUG - classes: NXsource.nxdl.xml:/top_up DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/top_up): -DEBUG - Is the synchrotron operating in top_up mode? +DEBUG - + Is the synchrotron operating in top_up mode? + DEBUG - ===== FIELD (//entry/instrument/source/type): DEBUG - value: b'Free Electron Laser' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -1488,7 +1573,9 @@ DEBUG - -> Metal Jet X-ray DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/type): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/type): -DEBUG - type of radiation source (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation source (pick one from the enumerated list and spell exactly) + DEBUG - ===== GROUP (//entry/instrument/source_pump [NXarpes::/NXentry/NXinstrument/NXsource]): DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] DEBUG - classes: @@ -1501,7 +1588,9 @@ DEBUG - DEBUG - documentation (NXinstrument.nxdl.xml:/SOURCE): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:): -DEBUG - The neutron or x-ray storage ring/facility. +DEBUG - + The neutron or x-ray storage ring/facility. 
+ DEBUG - ===== ATTRS (//entry/instrument/source_pump@NX_class) DEBUG - value: NXsource DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -1518,7 +1607,9 @@ DEBUG - classes: NXsource.nxdl.xml:/bunch_distance DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/bunch_distance): -DEBUG - For storage rings, time between bunches +DEBUG - + For storage rings, time between bunches + DEBUG - ===== ATTRS (//entry/instrument/source_pump/bunch_distance@units) DEBUG - value: us DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1532,7 +1623,9 @@ DEBUG - classes: NXsource.nxdl.xml:/bunch_length DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/bunch_length): -DEBUG - For storage rings, temporal length of the bunch +DEBUG - + For storage rings, temporal length of the bunch + DEBUG - ===== ATTRS (//entry/instrument/source_pump/bunch_length@units) DEBUG - value: fs DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1566,7 +1659,9 @@ DEBUG - classes: NXsource.nxdl.xml:/frequency DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/frequency): -DEBUG - Frequency of pulsed source +DEBUG - + Frequency of pulsed source + DEBUG - ===== ATTRS (//entry/instrument/source_pump/frequency@units) DEBUG - value: Hz DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_FLOAT'] @@ -1583,7 +1678,9 @@ DEBUG - enumeration (NXsource.nxdl.xml:/mode): DEBUG - -> Single Bunch DEBUG - -> Multi Bunch DEBUG - documentation (NXsource.nxdl.xml:/mode): -DEBUG - source operating mode +DEBUG - + source operating mode + DEBUG - ===== FIELD (//entry/instrument/source_pump/name): DEBUG - value: b'User Laser @ FLASH' DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_CHAR'] @@ -1594,7 +1691,9 @@ DEBUG - <> DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/name): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/name): -DEBUG - Name of source +DEBUG - + Name of source + DEBUG - ===== FIELD (//entry/instrument/source_pump/number_of_bunches): DEBUG - value: 400 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource', 'NX_INT'] @@ -1602,7 +1701,9 @@ DEBUG - classes: NXsource.nxdl.xml:/number_of_bunches DEBUG - <> DEBUG - documentation (NXsource.nxdl.xml:/number_of_bunches): -DEBUG - For storage rings, the number of bunches in use. +DEBUG - + For storage rings, the number of bunches in use. 
+ DEBUG - ===== FIELD (//entry/instrument/source_pump/number_of_bursts): DEBUG - value: 1 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -1629,7 +1730,9 @@ DEBUG - -> proton DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/probe): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/probe): -DEBUG - type of radiation probe (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation probe (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/source_pump/rms_jitter): DEBUG - value: 204.68816194453154 DEBUG - classpath: ['NXentry', 'NXinstrument', 'NXsource'] @@ -1664,27 +1767,41 @@ DEBUG - -> Metal Jet X-ray DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/INSTRUMENT/SOURCE/type): DEBUG - DEBUG - documentation (NXsource.nxdl.xml:/type): -DEBUG - type of radiation source (pick one from the enumerated list and spell exactly) +DEBUG - + type of radiation source (pick one from the enumerated list and spell exactly) + DEBUG - ===== FIELD (//entry/instrument/spatial_resolution): DEBUG - value: 500 -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/spatial_resolution +DEBUG - <> +DEBUG - documentation (NXinstrument.nxdl.xml:/spatial_resolution): DEBUG - + Spatial resolution of the experiment (Airy disk radius) + DEBUG - ===== ATTRS (//entry/instrument/spatial_resolution@units) DEBUG - value: um -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/spatial_resolution +DEBUG - NXinstrument.nxdl.xml:/spatial_resolution@units [NX_LENGTH] DEBUG - ===== FIELD (//entry/instrument/temporal_resolution): DEBUG - value: 100 -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/temporal_resolution +DEBUG - <> +DEBUG - documentation (NXinstrument.nxdl.xml:/temporal_resolution): DEBUG - + Temporal resolution of the experiment (FWHM) + DEBUG - ===== ATTRS (//entry/instrument/temporal_resolution@units) DEBUG - value: fs -DEBUG - classpath: ['NXentry', 'NXinstrument'] -DEBUG - NOT IN SCHEMA -DEBUG - +DEBUG - classpath: ['NXentry', 'NXinstrument', 'NX_FLOAT'] +DEBUG - classes: +NXinstrument.nxdl.xml:/temporal_resolution +DEBUG - NXinstrument.nxdl.xml:/temporal_resolution@units [NX_TIME] DEBUG - ===== FIELD (//entry/run_cycle): DEBUG - value: b'2018 User Run Block 2' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -1692,7 +1809,9 @@ DEBUG - classes: NXentry.nxdl.xml:/run_cycle DEBUG - <> DEBUG - documentation (NXentry.nxdl.xml:/run_cycle): -DEBUG - Such as "2007-3". Some user facilities organize their beam time into run cycles. +DEBUG - + Such as "2007-3". Some user facilities organize their beam time into run cycles. + DEBUG - ===== GROUP (//entry/sample [NXarpes::/NXentry/NXsample]): DEBUG - classpath: ['NXentry', 'NXsample'] DEBUG - classes: @@ -1706,12 +1825,12 @@ DEBUG - documentation (NXentry.nxdl.xml:/SAMPLE): DEBUG - DEBUG - documentation (NXsample.nxdl.xml:): DEBUG - - Any information on the sample. - - This could include scanned variables that - are associated with one of the data dimensions, e.g. the magnetic field, or - logged data, e.g. monitored temperature vs elapsed time. - + Any information on the sample. 
+ + This could include scanned variables that + are associated with one of the data dimensions, e.g. the magnetic field, or + logged data, e.g. monitored temperature vs elapsed time. + DEBUG - ===== ATTRS (//entry/sample@NX_class) DEBUG - value: NXsample DEBUG - classpath: ['NXentry', 'NXsample'] @@ -1766,7 +1885,9 @@ DEBUG - <> DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/SAMPLE/name): DEBUG - Descriptive name of sample DEBUG - documentation (NXsample.nxdl.xml:/name): -DEBUG - Descriptive name of sample +DEBUG - + Descriptive name of sample + DEBUG - ===== FIELD (//entry/sample/preparation_method): DEBUG - value: b'in-vacuum cleave' DEBUG - classpath: ['NXentry', 'NXsample'] @@ -1779,7 +1900,9 @@ DEBUG - classes: NXsample.nxdl.xml:/pressure DEBUG - <> DEBUG - documentation (NXsample.nxdl.xml:/pressure): -DEBUG - Applied pressure +DEBUG - + Applied pressure + DEBUG - ===== ATTRS (//entry/sample/pressure@units) DEBUG - value: mbar DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] @@ -1829,7 +1952,9 @@ DEBUG - classes: NXsample.nxdl.xml:/thickness DEBUG - <> DEBUG - documentation (NXsample.nxdl.xml:/thickness): -DEBUG - sample thickness +DEBUG - + sample thickness + DEBUG - ===== ATTRS (//entry/sample/thickness@units) DEBUG - value: mm DEBUG - classpath: ['NXentry', 'NXsample', 'NX_FLOAT'] @@ -1851,7 +1976,9 @@ DEBUG - <> DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/start_time): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/start_time): -DEBUG - Starting time of measurement +DEBUG - + Starting time of measurement + DEBUG - ===== FIELD (//entry/title): DEBUG - value: b'Excited-state dynamics of WSe2 in the Valence Band and Core-Levels' DEBUG - classpath: ['NXentry', 'NX_CHAR'] @@ -1862,7 +1989,9 @@ DEBUG - <> DEBUG - documentation (NXarpes.nxdl.xml:/ENTRY/title): DEBUG - DEBUG - documentation (NXentry.nxdl.xml:/title): -DEBUG - Extended title for entry +DEBUG - + Extended title for entry + DEBUG - ======================== DEBUG - === Default Plotable === DEBUG - ======================== diff --git a/tests/dataconverter/test_convert.py b/tests/dataconverter/test_convert.py index f6702bf01..a317c1470 100644 --- a/tests/dataconverter/test_convert.py +++ b/tests/dataconverter/test_convert.py @@ -61,6 +61,8 @@ def restore_xarray_file_from_tmp(tmp_path): ]) def test_find_nxdl(cli_inputs): """Unit test to check if dataconverter can find NXDLs in contributed/applications folder.""" + cli_inputs.extend(["--reader", "example"]) + runner = CliRunner() result = runner.invoke(dataconverter.convert_cli, cli_inputs) if "NXdoesnotexist" in cli_inputs: @@ -110,7 +112,7 @@ def test_cli(caplog, cli_inputs): def test_links_and_virtual_datasets(tmp_path): """A test for the convert CLI to check whether a Dataset object is created, -when the template contains links.""" + when the template contains links.""" move_xarray_file_to_tmp(tmp_path) dirpath = os.path.join(os.path.dirname(__file__), diff --git a/tests/dataconverter/test_helpers.py b/tests/dataconverter/test_helpers.py index 540cf07bb..421f8ce9b 100644 --- a/tests/dataconverter/test_helpers.py +++ b/tests/dataconverter/test_helpers.py @@ -19,6 +19,7 @@ import xml.etree.ElementTree as ET import os +import logging from setuptools import distutils import pytest import numpy as np @@ -27,6 +28,16 @@ from pynxtools.dataconverter.template import Template +def remove_optional_parent(data_dict: Template): + """Completely removes the optional group from the test Template.""" + internal_dict = Template(data_dict) + del 
internal_dict["/ENTRY[my_entry]/optional_parent/required_child"] + del internal_dict["/ENTRY[my_entry]/optional_parent/optional_child"] + del internal_dict["/ENTRY[my_entry]/optional_parent/req_group_in_opt_group/DATA[data]"] + + return internal_dict + + def alter_dict(data_dict: Template, key: str, value: object): """Helper function to alter a single entry in dict for parametrize.""" if data_dict is not None: @@ -69,6 +80,29 @@ def listify_template(data_dict: Template): return listified_template +@pytest.mark.parametrize("input_data, expected_output", [ + ('2.4E-23', 2.4e-23), + ('28', 28), + ('45.98', 45.98), + ('test', 'test'), + (['59', '3.00005', '498E-36'], np.array([59.0, 3.00005, 4.98e-34])), + ('23 34 444 5000', np.array([23., 34., 444., 5000.])), + ('xrd experiment', 'xrd experiment'), + (None, None), +]) +def test_transform_to_intended_dt(input_data, expected_output): + """Transform to possible numerical method.""" + result = helpers.transform_to_intended_dt(input_data) + + # Use pytest.approx for comparing floating-point numbers + if isinstance(expected_output, np.ndarray): + np.testing.assert_allclose(result, expected_output, rtol=1e-3) + elif isinstance(expected_output, float): + assert result == pytest.approx(expected_output, rel=1e-5) + else: + assert result == expected_output + + @pytest.fixture(name="nxdl_root") def fixture_nxdl_root(): """pytest fixture to load the same NXDL file for all tests.""" @@ -101,31 +135,31 @@ def fixture_filled_test_data(template, tmp_path): tmp_path) template.clear() - template["optional"]["/ENTRY[my_entry]/NXODD_name/float_value"] = 2.0 - template["optional"]["/ENTRY[my_entry]/NXODD_name/float_value/@units"] = "nm" - template["optional"]["/ENTRY[my_entry]/optional_parent/required_child"] = 1 - template["optional"]["/ENTRY[my_entry]/optional_parent/optional_child"] = 1 - template["required"]["/ENTRY[my_entry]/NXODD_name/bool_value"] = True - template["required"]["/ENTRY[my_entry]/NXODD_name/int_value"] = 2 - template["required"]["/ENTRY[my_entry]/NXODD_name/int_value/@units"] = "eV" - template["required"]["/ENTRY[my_entry]/NXODD_name/posint_value"] = np.array([1, 2, 3], - dtype=np.int8) - template["required"]["/ENTRY[my_entry]/NXODD_name/posint_value/@units"] = "kg" - template["required"]["/ENTRY[my_entry]/NXODD_name/char_value"] = "just chars" - template["required"]["/ENTRY[my_entry]/definition"] = "NXtest" - template["required"]["/ENTRY[my_entry]/definition/@version"] = "2.4.6" - template["required"]["/ENTRY[my_entry]/program_name"] = "Testing program" - template["required"]["/ENTRY[my_entry]/NXODD_name/type"] = "2nd type" - template["required"]["/ENTRY[my_entry]/NXODD_name/date_value"] = ("2022-01-22T12" - ":14:12.05018+00:00") - template["optional"]["/ENTRY[my_entry]/required_group/description"] = "An example description" - template["optional"]["/ENTRY[my_entry]/required_group2/description"] = "An example description" - template["undocumented"]["/ENTRY[my_entry]/does/not/exist"] = "random" - template["undocumented"]["/ENTRY[my_entry]/links/ext_link"] = {"link": - f"{tmp_path}/" - f"xarray_saved_small_cali" - f"bration.h5:/axes/ax3" - } + template["/ENTRY[my_entry]/NXODD_name/float_value"] = 2.0 + template["/ENTRY[my_entry]/NXODD_name/float_value/@units"] = "nm" + template["/ENTRY[my_entry]/optional_parent/required_child"] = 1 + template["/ENTRY[my_entry]/optional_parent/optional_child"] = 1 + template["/ENTRY[my_entry]/NXODD_name/bool_value"] = True + template["/ENTRY[my_entry]/NXODD_name/int_value"] = 2 + 
template["/ENTRY[my_entry]/NXODD_name/int_value/@units"] = "eV" + template["/ENTRY[my_entry]/NXODD_name/posint_value"] = np.array([1, 2, 3], + dtype=np.int8) + template["/ENTRY[my_entry]/NXODD_name/posint_value/@units"] = "kg" + template["/ENTRY[my_entry]/NXODD_name/char_value"] = "just chars" + template["/ENTRY[my_entry]/definition"] = "NXtest" + template["/ENTRY[my_entry]/definition/@version"] = "2.4.6" + template["/ENTRY[my_entry]/program_name"] = "Testing program" + template["/ENTRY[my_entry]/NXODD_name/type"] = "2nd type" + template["/ENTRY[my_entry]/NXODD_name/date_value"] = ("2022-01-22T12" + ":14:12.05018+00:00") + template["/ENTRY[my_entry]/required_group/description"] = "An example description" + template["/ENTRY[my_entry]/required_group2/description"] = "An example description" + template["/ENTRY[my_entry]/does/not/exist"] = "random" + template["/ENTRY[my_entry]/links/ext_link"] = {"link": + f"{tmp_path}/" + f"xarray_saved_small_cali" + f"bration.h5:/axes/ax3" + } yield template @@ -148,7 +182,11 @@ def fixture_filled_test_data(template, tmp_path): TEMPLATE["required"]["/ENTRY[my_entry]/NXODD_name/date_value"] = "2022-01-22T12:14:12.05018+00:00" # pylint: disable=E1126 TEMPLATE["optional"]["/ENTRY[my_entry]/required_group/description"] = "An example description" TEMPLATE["optional"]["/ENTRY[my_entry]/required_group2/description"] = "An example description" -# TEMPLATE["optional_parents"].append("/ENTRY[entry]/optional_parent") +TEMPLATE["required"]["/ENTRY[my_entry]/optional_parent/req_group_in_opt_group/DATA[data]"] = 1 +TEMPLATE["lone_groups"] = ['/ENTRY[entry]/required_group', + '/ENTRY[entry]/required_group2', + '/ENTRY[entry]/optional_parent/req_group_in_opt_group'] +TEMPLATE["optional"]["/@default"] = "Some NXroot attribute" @pytest.mark.parametrize("data_dict,error_message", [ @@ -241,13 +279,11 @@ def fixture_filled_test_data(template, tmp_path): id="valid-data-dict"), pytest.param( remove_from_dict(TEMPLATE, "/ENTRY[my_entry]/required_group/description"), - ("The data entry corresponding to /ENTRY[entry]/required_group " - "is required and hasn't been supplied by the reader."), + "The required group, /ENTRY[entry]/required_group, hasn't been supplied.", id="missing-empty-yet-required-group"), pytest.param( remove_from_dict(TEMPLATE, "/ENTRY[my_entry]/required_group2/description"), - ("The data entry corresponding to /ENTRY[entry]/required_group2 " - "is required and hasn't been supplied by the reader."), + "The required group, /ENTRY[entry]/required_group2, hasn't been supplied.", id="missing-empty-yet-required-group2"), pytest.param( alter_dict( @@ -258,6 +294,21 @@ def fixture_filled_test_data(template, tmp_path): (""), id="allow-required-and-empty-group" ), + pytest.param( + remove_from_dict(TEMPLATE, + "/ENTRY[my_entry]/optional_parent/req_group_in_opt_group/DATA[data]", + "required" + ), + ("The required group, /ENTRY[entry]/optional_parent/req_group_in_opt_group, hasn't been " + "supplied while its optional parent, /ENTRY[entry]/optional_parent/" + "req_group_in_opt_group, is supplied."), + id="req-group-in-opt-parent-removed" + ), + pytest.param( + remove_optional_parent(TEMPLATE), + (""), + id="opt-group-completely-removed" + ), ]) def test_validate_data_dict(data_dict, error_message, template, nxdl_root, request): """Unit test for the data validation routine""" @@ -269,12 +320,12 @@ def test_validate_data_dict(data_dict, error_message, template, nxdl_root, reque "no-child-provided-optional-parent", "int-instead-of-chars", "link-dict-instead-of-bool", - 
"allow-required-and-empty-group"): + "allow-required-and-empty-group", + "opt-group-completely-removed"): helpers.validate_data_dict(template, data_dict, nxdl_root) else: with pytest.raises(Exception) as execinfo: helpers.validate_data_dict(template, data_dict, nxdl_root) - assert (error_message) == str(execinfo.value) @@ -285,7 +336,7 @@ def test_validate_data_dict(data_dict, error_message, template, nxdl_root, reque id="path-exists-in-dict"), pytest.param( "/RANDOM/does/not/@exist", - (False, ""), + (False, None), id="path-does-not-exist-in-dict") ]) def test_path_in_data_dict(nxdl_path, expected, template): @@ -304,3 +355,47 @@ def test_atom_type_extractor_and_hill_conversion(): atom_list = helpers.extract_atom_types(test_chemical_formula) assert expected_atom_types == atom_list + + +def test_writing_of_root_attributes(caplog): + """ + Tests if all root attributes are populated + """ + template = Template() + filename = "my_nexus_file.nxs" + with caplog.at_level(logging.WARNING): + helpers.add_default_root_attributes(template, filename) + + assert "" == caplog.text + + keys_added = template.keys() + assert "/@NX_class" in keys_added + assert template["/@NX_class"] == "NXroot" + assert "/@file_name" in keys_added + assert template["/@file_name"] == filename + assert "/@file_time" in keys_added + assert "/@file_update_time" in keys_added + assert "/@NeXus_repository" in keys_added + assert "/@NeXus_version" in keys_added + assert "/@HDF5_version" in keys_added + assert "/@h5py_version" in keys_added + + +def test_warning_on_root_attribute_overwrite(caplog): + """ + A warning is emitted when a root attribute is overwritten + by pynxtools. + """ + template = Template() + template["/@NX_class"] = "NXwrong" + filname = "my_nexus_file.nxs" + with caplog.at_level(logging.WARNING): + helpers.add_default_root_attributes(template, filname) + error_text = ( + "The NXroot entry '/@NX_class' (value: NXwrong) should not be populated by the reader. " + "This is overwritten by the actually used value 'NXroot'" + ) + assert error_text in caplog.text + + assert "/@NX_class" in template.keys() + assert template["/@NX_class"] == "NXroot" diff --git a/tests/dataconverter/test_readers.py b/tests/dataconverter/test_readers.py index 3d2c86efd..d75344541 100644 --- a/tests/dataconverter/test_readers.py +++ b/tests/dataconverter/test_readers.py @@ -102,3 +102,32 @@ def test_has_correct_read_func(reader): assert isinstance(read_data, Template) assert validate_data_dict(template, read_data, root) + + +@pytest.mark.parametrize("reader_name,nxdl,undocumented_keys", [ + ('mpes', 'NXmpes', []) +]) +def test_shows_correct_warnings(reader_name, nxdl, undocumented_keys): + """ + Checks whether the read function generates the correct warnings. 
+ """ + def_dir = os.path.join(os.getcwd(), "pynxtools", "definitions") + dataconverter_data_dir = os.path.join("tests", "data", "dataconverter") + + input_files = sorted( + glob.glob(os.path.join(dataconverter_data_dir, "readers", reader_name, "*")) + ) + nxdl_file = os.path.join( + def_dir, "contributed_definitions", f"{nxdl}.nxdl.xml" + ) + + root = ET.parse(nxdl_file).getroot() + template = Template() + generate_template_from_nxdl(root, template) + + read_data = get_reader(reader_name)().read( + template=Template(template), file_paths=tuple(input_files) + ) + + assert validate_data_dict(template, read_data, root) + assert list(read_data.undocumented.keys()) == undocumented_keys diff --git a/tests/eln_mapper/__init__.py b/tests/eln_mapper/__init__.py new file mode 100644 index 000000000..7f1819634 --- /dev/null +++ b/tests/eln_mapper/__init__.py @@ -0,0 +1,16 @@ +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/eln_mapper/test_eln_mapper.py b/tests/eln_mapper/test_eln_mapper.py new file mode 100644 index 000000000..17f9130dd --- /dev/null +++ b/tests/eln_mapper/test_eln_mapper.py @@ -0,0 +1,107 @@ +"""This test is dedicated generate_eln converter tool. +""" + +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +import os +from typing import Dict +from click import testing + + +import yaml +from pynxtools.eln_mapper import eln_mapper + + +def check_keys_from_two_dict(dict1: Dict, dict2: Dict): + """Compare keys of two dicts. + + Parameters + ---------- + dict1 : Dict + Dict-1 to compare the key with Dict-2 + dict2 : Dict + Dict-2 to compare the key with Dict-1 + """ + for (key1, val1), (key2, val2) in zip(dict1.items(), dict2.items()): + assert key1 == key2, "Test and Ref yaml file have different keys." + if isinstance(val1, dict) and isinstance(val2, dict): + check_keys_from_two_dict(val1, val2) + + +def test_reader_eln(tmp_path): + """Test eln that goes with reader. 
+ + Parameters + ---------- + tmp_path : pathlib.Path + A temporary path that is created for pytest + """ + + local_dir = os.path.abspath(os.path.dirname(__file__)) + ref_file = os.path.join(local_dir, '../data/eln_mapper/eln.yaml') + + test_file = os.path.join(tmp_path, 'eln.yaml') + cli_run = testing.CliRunner() + cli_run.invoke(eln_mapper.get_eln, [ + "--nxdl", + "NXmpes", + "--skip-top-levels", + 1, + "--output-file", + test_file, + "--eln-type", + 'eln']) + + with open(ref_file, encoding='utf-8', mode='r') as ref_f: + ref_dict = yaml.safe_load(ref_f) + + with open(test_file, encoding='utf-8', mode='r') as test_f: + test_dict = yaml.safe_load(test_f) + + check_keys_from_two_dict(ref_dict, test_dict) + + +def test_scheme_eln(tmp_path): + """Test Eln that goes in Nomad + + Parameters + ---------- + tmp_path : pathlib.Path + A temporary path that is created for pytest + """ + + local_dir = os.path.abspath(os.path.dirname(__file__)) + ref_file = os.path.join(local_dir, '../data/eln_mapper/mpes.scheme.archive.yaml') + + test_file = os.path.join(tmp_path, '.scheme.archive.yaml') + cli_run = testing.CliRunner() + cli_run.invoke(eln_mapper.get_eln, [ + "--nxdl", + "NXmpes", + "--output-file", + test_file, + "--eln-type", + 'scheme_eln']) + with open(ref_file, encoding='utf-8', mode='r') as ref_f: + ref_dict = yaml.safe_load(ref_f) + + with open(test_file, encoding='utf-8', mode='r') as test_f: + test_dict = yaml.safe_load(test_f) + + check_keys_from_two_dict(ref_dict, test_dict) diff --git a/tests/nexus/test_nexus.py b/tests/nexus/test_nexus.py index 894657d7a..d69b0fae2 100644 --- a/tests/nexus/test_nexus.py +++ b/tests/nexus/test_nexus.py @@ -49,9 +49,9 @@ def test_get_nexus_classes_units_attributes(): def test_nexus(tmp_path): - """The nexus test function - -""" + """ + The nexus test function + """ local_dir = os.path.abspath(os.path.dirname(__file__)) example_data = os.path.join(local_dir, '../data/nexus/201805_WSe2_arpes.nxs') logger = logging.getLogger(__name__) @@ -73,15 +73,12 @@ def test_nexus(tmp_path): encoding='utf-8' ) as reffile: ref = reffile.readlines() - assert log == ref - # didn't work with filecmp library - # log = os.path.join(local_dir, 'data/nexus_test_data/nexus_test.log') - # ref = os.path.join(local_dir, 'data/nexus_test_data/Ref2_nexus_test.log') - # print('yoyo', filecmp.cmp(log, ref, shallow=False)) - - # print('Testing of nexus.py is SUCCESSFUL.') + # import filecmp + # # didn't work with filecmp library + # log = os.path.join(local_dir, '../data/nexus_test_data/nexus_test.log') + # ref = os.path.join(local_dir, '../data/nexus_test_data/Ref_nexus_test.log') def test_get_node_at_nxdl_path(): @@ -102,7 +99,7 @@ def test_get_node_at_nxdl_path(): nxdl_file_path = os.path.join( local_dir, - "../../pynxtools/definitions/contributed_definitions/NXem.nxdl.xml" + "../data/nexus/NXtest2.nxdl.xml" ) elem = ET.parse(nxdl_file_path).getroot() node = nexus.get_node_at_nxdl_path( diff --git a/tests/nexus/test_version.py b/tests/nexus/test_version.py new file mode 100644 index 000000000..3fa915ce3 --- /dev/null +++ b/tests/nexus/test_version.py @@ -0,0 +1,16 @@ +""" +Tests the version retrieval for the nexus definitions submodule +""" +import re + +from pynxtools import get_nexus_version + + +def test_get_nexus_version(): + """ + Tests if we get a version string from nexus definitions + """ + version = get_nexus_version() + + assert version is not None + assert re.match(r"v\d{4}\.\d{2}\.post1\.dev\d+\+g[a-z0-9]", version)
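As a side note on the new tests/nexus/test_version.py added above: it checks the nexus definitions submodule version string against a fairly strict regular expression. The following minimal, self-contained Python sketch only illustrates what that pattern accepts; the sample version string is hypothetical and stands in for whatever pynxtools.get_nexus_version() returns for the checked-out submodule (tag year/month, post-release marker, dev distance, and git hash prefix):

    import re

    # Hypothetical example of a nexus-definitions version string; the real value
    # comes from pynxtools.get_nexus_version() and depends on the submodule state.
    sample_version = "v2022.07.post1.dev1278+g9b12fde"

    # Same pattern as asserted in tests/nexus/test_version.py above.
    pattern = r"v\d{4}\.\d{2}\.post1\.dev\d+\+g[a-z0-9]"

    assert re.match(pattern, sample_version) is not None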