diff --git a/dev-requirements.txt b/dev-requirements.txt index 5fe6268fd..0620dae14 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --extra=dev --output-file=dev-requirements.txt --resolver=backtracking pyproject.toml @@ -124,6 +124,7 @@ h5py==3.7.0 # ifes-apt-tc-data-modeling # jupyterlab-h5web # kikuchipy + # nionswift # orix # pyfai # pynxtools (pyproject.toml) @@ -143,17 +144,12 @@ imageio==2.22.1 # via # hyperspy # kikuchipy + # nionswift + # nionswift-io + # nionui # scikit-image importlib-metadata==5.0.0 - # via - # hyperspy - # jupyterlab-server - # nbconvert - # numba -importlib-resources==5.9.0 - # via - # jsonschema - # radioactivedecay + # via hyperspy iniconfig==1.1.1 # via pytest ipykernel==6.16.0 @@ -312,6 +308,22 @@ networkx==2.6.3 # via # radioactivedecay # scikit-image +niondata==0.15.3 + # via + # nionswift + # nionswift-io +nionswift==0.16.8 + # via pynxtools (pyproject.toml) +nionswift-io==0.15.1 + # via nionswift +nionui==0.6.10 + # via nionswift +nionutils==0.4.6 + # via + # niondata + # nionswift + # nionswift-io + # nionui notebook==6.5.2 # via jupyterlab notebook-shim==0.2.2 @@ -344,6 +356,10 @@ numpy==1.21.6 # kikuchipy # lmfit # matplotlib + # niondata + # nionswift + # nionswift-io + # nionui # numba # numcodecs # numexpr @@ -407,13 +423,12 @@ pillow==9.2.0 # via # imageio # matplotlib + # nionswift # scikit-image pint==0.18 # via hyperspy pip-tools==6.13.0 # via pynxtools (pyproject.toml) -pkgutil-resolve-name==1.3.10 - # via jsonschema platformdirs==2.5.2 # via # jupyter-core @@ -487,8 +502,11 @@ python-dateutil==2.8.2 pytz==2022.4 # via # babel + # nionswift # pandas # pynxtools (pyproject.toml) +pytz-deprecation-shim==0.1.0.post0 + # via tzlocal pywavelets==1.3.0 # via scikit-image pyxem==0.15.0 @@ -534,6 +552,8 @@ scipy==1.7.3 # hyperspy 
# kikuchipy # lmfit + # niondata + # nionswift # orix # pyfai # pynxtools (pyproject.toml) @@ -647,10 +667,14 @@ types-urllib3==1.26.25.5 # via types-requests typing-extensions==4.3.0 # via - # astroid # mypy # numcodecs - # pylint +tzdata==2023.3 + # via pytz-deprecation-shim +tzlocal==4.3 + # via + # nionswift + # pynxtools (pyproject.toml) uncertainties==3.1.7 # via lmfit url-normalize==1.4.3 @@ -682,9 +706,7 @@ zarr==2.12.0 zipfile37==0.1.3 # via pynxtools (pyproject.toml) zipp==3.8.1 - # via - # importlib-metadata - # importlib-resources + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/examples/apm/Write.NXapm.Example.1.ipynb b/examples/apm/Write.NXapm.Example.1.ipynb index ae373fd6f..efae762e3 100644 --- a/examples/apm/Write.NXapm.Example.1.ipynb +++ b/examples/apm/Write.NXapm.Example.1.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Using dataconverter/apm for mapping atom probe microscopy data to NeXus/NXapm" + "## Using dataconverter/apm for mapping atom probe microscopy data to NeXus/HDF5/NXapm" ] }, { @@ -68,7 +68,7 @@ "tags": [] }, "source": [ - "Example data can be found on Zenodo http://dx.doi.org/10.5281/zenodo.6808516." + "Example data can be found on Zenodo https://www.zenodo.org/record/7908429." ] }, { @@ -79,7 +79,7 @@ }, "outputs": [], "source": [ - "import shutil # unpacks in current path unless an additional path argument is provided" + "import zipfile as zp" ] }, { @@ -88,8 +88,16 @@ "metadata": {}, "outputs": [], "source": [ - "! curl --output APM.LEAP.Datasets.1.zip https://zenodo.org/record/6808516/files/APM.LEAP.Datasets.1.zip\n", - "shutil.unpack_archive(\"APM.LEAP.Datasets.1.zip\")" + "! 
curl --output usa_denton_smith_apav_si.zip https://zenodo.org/record/7908429/files/usa_denton_smith_apav_si.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "zp.ZipFile(\"usa_denton_smith_apav_si.zip\").extractall(path=\"\", members=None, pwd=None)" ] }, { @@ -101,7 +109,7 @@ " The eln_data_apm.yaml file in the example can be edited with a text editor.
\n", "* A file with **reconstructed ion positions** in community, technology partner format with
\n", " the ion positions and mass-to-charge state ratio values for the tomographic reconstruction.
\n", - " POS, ePOS, or APT are allowed.
\n", + " POS, ePOS, or APT are allowed. Inspect some of the above-mentioned examples on Zenodo.
\n", "* A file with **ranging definitions** in community, technology partner format with
\n", " the definitions how mass-to-charge-state-ratio values map on ion species.
\n", " RNG, RRNG and is possible. A MatLab script can be used to inject other representations
\n", @@ -197,26 +205,34 @@ "source": [ "#parser-nexus/tests/data/tools/dataconverter/readers/em_om/\n", "eln_data_file_name = [\"eln_data_apm.yaml\"]\n", - "input_recon_file_name = [\"R31_06365-v02.pos\",\n", + "input_recon_file_name = [\"Si.apt\",\n", + " \"Si.epos\",\n", + " \"Si.pos\",\n", + " \"R31_06365-v02.pos\",\n", " \"R18_58152-v02.epos\",\n", " \"70_50_50.apt\"]\n", "# \"R56_01769-v01.pos\"]\n", - "input_range_file_name = [\"R31_06365-v02.rrng\",\n", + "input_range_file_name = [\"Si.RRNG\",\n", + " \"Si.RNG\",\n", + " \"Si.RNG\",\n", + " \"R31_06365-v02.rrng\",\n", " \"R31_06365-v02.rrng\",\n", " \"R31_06365-v02.rrng\"]\n", "# \"R56_01769.rng.fig.txt\"]\n", "output_file_name = [\"apm.case1.nxs\",\n", " \"apm.case2.nxs\",\n", " \"apm.case3.nxs\",\n", - " \"apm.case4.nxs\"]\n", - "for case_id in [0]: # , 1, 2, 3]:\n", + " \"apm.case4.nxs\",\n", + " \"apm.case5.nxs\",\n", + " \"apm.case6.nxs\"]\n", + "for case_id in [0]:\n", " ELN = eln_data_file_name[0]\n", " INPUT_RECON = input_recon_file_name[case_id]\n", " INPUT_RANGE = input_range_file_name[case_id]\n", " OUTPUT = output_file_name[case_id]\n", "\n", " ! dataconverter --reader apm --nxdl NXapm --input-file $ELN --input-file \\\n", - " $INPUT_RECON --input-file $INPUT_RANGE --output $OUTPUT --output $OUTPUT" + " $INPUT_RECON --input-file $INPUT_RANGE --output $OUTPUT" ] }, { @@ -242,10 +258,7 @@ "outputs": [], "source": [ "# H5Web(OUTPUT)\n", - "H5Web(\"apm.case1.nxs\")\n", - "# H5Web(\"apm.case2.nxs\")\n", - "# H5Web(\"apm.case3.nxs\")\n", - "# H5Web(\"apm.case4.nxs\")" + "H5Web(\"apm.case1.nxs\")" ] }, { @@ -258,7 +271,6 @@ { "cell_type": "markdown", "metadata": { - "jp-MarkdownHeadingCollapsed": true, "tags": [] }, "source": [ @@ -432,6 +444,19 @@ "The apm reader has a functionality to generate synthetic dataset which are meant for pursuing code development." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "This functionality uses recent features of ase which demands an environment that is currently not supported
\n", + "by NOMAD OASIS. As the here exemplified settings for this example are configured to represent an environment
\n", + "matching close to NOMAD users who are interested in this developer functionality should do the following:
\n", + "Run this example in a standalone environment where ase is upgraded to the latest version and then use
\n", + "the generated NeXus files either as is or upload them to NOMAD OASIS.
\n", + "
" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/examples/em_nion/README.md b/examples/em_nion/README.md new file mode 100644 index 000000000..324fd50c8 --- /dev/null +++ b/examples/em_nion/README.md @@ -0,0 +1,24 @@ +## em_nion reader + +This is an example how the em_nion parser/reader/data extractor can be used as a standalone +tool to convert data and metadata from a compressed nionswift project into an NXem-formatted +NeXus/HDF5 file. Further details to the functionalities of the parser are documented +in the parsers sub-directory: + +``` +pynxtools/pynxtools/dataconverter/readers/em_nion +``` + +**Write.NXem_nion.Example.1.ipynb** is the Jupyter notebook which exemplies +how the parser can be used as a standalone version, i.e. without NOMAD. + +**eln_data_em_nion.yaml** is a YAML/text file which contains relevant data which are not +contained typically in files from technology partners. These data have been collected +either by editing the file manually or by using an electronic lab notebook (ELN), +such as the NOMAD ELN. +A few example files from real atom probe reconstructions and ranging definitions are +offered as downloads to run the example with the above-mentioned Juypter notebook. + +Every other ELN can be used with this parser provided that this ELN writes its data +into a YAML file with the same keywords and structure as is exemplified in the +above-mentioned YAML file. 
\ No newline at end of file diff --git a/examples/em_nion/Write.NXem_nion.Example.1.ipynb b/examples/em_nion/Write.NXem_nion.Example.1.ipynb new file mode 100644 index 000000000..af08fdd0e --- /dev/null +++ b/examples/em_nion/Write.NXem_nion.Example.1.ipynb @@ -0,0 +1,279 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Using dataconverter/em_nion for mapping content of a nionswift project to NeXus/HDF5/NXem" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "### **Step 1:** Check that packages are installed and working in your local Python environment." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Check the result of the query below specifically that `jupyterlab_h5web` and `pynxtools` are installed in your environment.
\n", + "Note that next to the name pynxtools you should see the directory in which it is installed. Otherwise, make sure that you follow
\n", + "the instructions in the `README` files: \n", + "- How to set up a development environment as in the main README \n", + "- Lauch the jupyter lab from this environement as in the README of folder `examples`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! pip list | grep \"h5py\\|nexus\\|jupyter\" && jupyter serverextension list && jupyter labextension list && python -V" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Set the pynxtools directory and start H5Web for interactive exploring of HDF5 files." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from jupyterlab_h5web import H5Web\n", + "print(f\"Current working directory: {os.getcwd()}\")\n", + "print(f\"So-called base, home, or root directory of the pynxtools: {os.getcwd().replace('/examples/em_nion', '')}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### **Step 2:** Download Nionswift-specific example data or try out with one of your own datasets." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "Example data can be found on Zenodo http://dx.doi.org/10.5281/zenodo.7986279." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import zipfile as zp" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! wget https://www.zenodo.org/record/7986279/files/ger_berlin_haas_nionswift_multimodal.zip\n", + "zp.ZipFile(\"ger_berlin_haas_nionswift_multimodal.zip\").extractall(path=\"\", members=None, pwd=None)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These files should serve exclusively as examples. 
The dataconverter em_nion for nionswift project always requires a pair of files:\n", + "* A **YAML file with metadata** (either edited manually/or generated via an ELN).
\n", + " The eln_data_apm.yaml file in the example can be edited with a text editor.
\n", + "* A **compressed zip.nionswift file** with the mime type/file name ending \\*.nionswift suffix.
\n", + " This includes the *.nsproj file and a directory or directory nest with *.ndata and *.h5 files
\n", + " which were generated from nionswift.
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "For GUI-based editing, a NOMAD OASIS instance is needed.
\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "Please note that the metadata inside the provided eln_data_em_nion.yaml file contains example values.
\n", + "These reflect not necessarily the conditions when the raw data for the example were collected!
\n", + "The file is meant to be edited by you if you work with datasets others than the here provided!
\n", + "
" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "### **Step 3:** Run the nionswift-specific dataconverter on the example data." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we run our parser. The --reader flag takes the em_nion reader (em_nion), the --nxdl flag takes the application definition for this technique which is currently NXem.
" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### **Step 3a:** Optionally see the command line help of the dataconverter." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! dataconverter --help" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### **Step 3b:** Optionally explore all paths which NXapm provides." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# to inspect what can/should all be in the NeXus file\n", + "! dataconverter --nxdl NXem --generate-template" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### **Step 3c**: Convert the files in the example into an NXapm-compliant NeXus/HDF5 file." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "#parser-nexus/tests/data/tools/dataconverter/readers/em_om/\n", + "eln_data_file_name = [\"eln_data_em_nion.yaml\"]\n", + "swift_proj_file_name = [\"2022-02-18_Metadata_Kuehbach.zip.nionswift\"]\n", + "output_file_name = [\"nion.case1.nxs\"]\n", + "for case_id in [0]:\n", + " ELN = eln_data_file_name[0]\n", + " SWIFT = swift_proj_file_name[case_id]\n", + " OUTPUT = output_file_name[case_id]\n", + "\n", + " ! dataconverter --reader em_nion --nxdl NXem --input-file $ELN --input-file $SWIFT --output $OUTPUT" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The key take home message is that the command above-specified triggers the automatic creation of the HDF5 file. This *.nxs file, is an HDF5 file." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "### **Step 4:** Inspect the NeXus/HDF5 file using H5Web." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# H5Web(OUTPUT)\n", + "H5Web(\"nion.case1.nxs\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can also visualize the .nxs file by double clicking on it in the file explorer panel to the left side of your jupyter lab screen in the browser." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Contact person for the em_nion reader and related examples in FAIRmat:\n", + "Markus Kühbach, 2023/05
\n", + "\n", + "### Funding\n", + "FAIRmat is a consortium on research data management which is part of the German NFDI.
\n", + "The project is funded by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation) – project 460197019." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/examples/em_nion/eln_data_em_nion.yaml b/examples/em_nion/eln_data_em_nion.yaml new file mode 100644 index 000000000..455816c26 --- /dev/null +++ b/examples/em_nion/eln_data_em_nion.yaml @@ -0,0 +1,63 @@ +em_lab: + detector: + - local_name: Should better be inferred in the future from a Nion microscope instance description instead of having to add it every time manually. + ebeam_column: + aberration_correction: + applied: true + aperture_em: + - name: C1 + value: 4 + electron_source: + emitter_type: field_emission + voltage: + unit: V + value: 200000 + fabrication: + capabilities: '---' + identifier: ChristophKochGroupsNionMicroscope + model: NionHermes200 + vendor: Nion Co. 
+ instrument_name: Nion Hermes 200kV + location: Berlin + optical_system_em: + beam_current: + unit: A + value: 1.2e-11 + beam_current_description: estimated + magnification: 610000 + semi_convergence_angle: + unit: rad + value: 0.2 + stage_lab: + description: double tilt + name: nothing +entry: + attr_version: nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696 + definition: NXem + end_time: '2023-05-25T14:44:00+01:00' + experiment_description: BenediktHaas_MultiModal_ImagingMode_TestDataSet + experiment_identifier: 2022-04-18-BenediktHaas + program: nionswift + program__attr_version: 0.16.8, this is what Markus used for implementing the nionswift project file reader but was it also the version used by Benedikt? + start_time: '2023-05-25T14:44:00+01:00' +sample: + atom_types: + - Cu + description: test + method: experiment + name: Cu + preparation_date: '2023-05-25T14:44:00+01:00' + sample_history: unknown + short_title: Cu + thickness: + unit: m + value: 2.0e-08 +user: +- name: MarkusK + orcid: '0000' +- email: '----' + name: Benedikt +- name: Sherjeel + affiliation: HU Berlin +- name: Christoph + role: B1 task leader diff --git a/examples/em_om/Write.NXem_ebsd.Example.1.ipynb b/examples/em_om/Write.NXem_ebsd.Example.1.ipynb index 96268a529..dd62925fb 100644 --- a/examples/em_om/Write.NXem_ebsd.Example.1.ipynb +++ b/examples/em_om/Write.NXem_ebsd.Example.1.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Using dataconverter/em_om for mapping EBSD/Orientation Microscopy to NeXus/NXem_ebsd" + "## Using dataconverter/em_om for mapping EBSD/Orientation Microscopy to NeXus/HDF5/NXem_ebsd" ] }, { @@ -61,7 +61,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### **Step 2:** Download EM-OM-specific example data or use your own dataset." + "### **Step 2:** Download EM-OM-specific example data or use your own datasets." 
] }, { @@ -79,7 +79,7 @@ "metadata": {}, "outputs": [], "source": [ - "import shutil # unpacks in current path unless an additional path argument is provided" + "import zipfile as zp" ] }, { @@ -90,10 +90,18 @@ }, "outputs": [], "source": [ - "! cd $PWD && curl --output em_om_sprint14_01.zip https://zenodo.org/record/7885531/files/em_om_sprint14_01.zip\n", - "shutil.unpack_archive(\"em_om_sprint14_01.zip\")\n", - "! cd $PWD && curl --output em_om_sprint14_02.zip https://zenodo.org/record/7885531/files/em_om_sprint14_02.zip\n", - "shutil.unpack_archive(\"em_om_sprint14_02.zip\")" + "! curl --output em_om_sprint14_01.zip https://zenodo.org/record/7885531/files/em_om_sprint14_01.zip\n", + "! curl --output em_om_sprint14_02.zip https://zenodo.org/record/7885531/files/em_om_sprint14_02.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "zp.ZipFile(\"em_om_sprint14_01.zip\").extractall(path=\"\", members=None, pwd=None)\n", + "zp.ZipFile(\"em_om_sprint14_02.zip\").extractall(path=\"\", members=None, pwd=None)" ] }, { @@ -133,7 +141,7 @@ "metadata": {}, "source": [ "
\n", - "Please note that the metadata inside the provided eln_data_em_om.yaml file contains example values.
\n", + "Please note that the metadata inside the provided eln_data_em_om.yaml file are example values.
\n", "These reflect not necessarily the conditions when the raw data for the example were collected!
\n", "The file is meant to be edited by you if you work with datasets others than the here provided!
\n", "
" @@ -146,7 +154,7 @@ "
\n", "Feel free to contact the maintainer of this example to learn more about the parsing capabilities of SEM/EBSD data in NOMAD.
\n", "We have also a draft version which supports importing results from MatLab/MTex and DREAM.3D. We would like to get in contact
\n", - "to document these further, ideally using as diverse examples as possible, maybe also including one of your examples?
\n", + "to document and develop these further, ideally using as diverse examples as possible, maybe also including one of your examples?
\n", "
" ] }, @@ -207,6 +215,15 @@ "### **Step 3c**: Convert the files in the example into an NXem_ebsd-compliant NeXus/HDF5 file." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "
\n", + "In what follows we will show several of the examples that have been implemented for SEM/EBSD.
\n", + "
" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -256,7 +273,7 @@ " \"em_om.case2.nxs\",\n", " \"em_om.case3e.nxs\",\n", " \"em_om.case4.nxs\"]\n", - "for case_id in [0, 1, 2, 3]:\n", + "for case_id in [4]: # [0, 1, 2, 3]:\n", " ELN = eln_data_file_name[0]\n", " INPUT = input_data_file_name[case_id]\n", " OUTPUT = output_file_name[case_id]\n", @@ -287,11 +304,11 @@ "outputs": [], "source": [ "# H5Web(OUTPUT)\n", - "# H5Web(\"em_om.case0.nxs\")\n", + "H5Web(\"em_om.case0.nxs\")\n", "H5Web(\"em_om.case1.nxs\")\n", - "# H5Web(\"em_om.case2.nxs\")\n", - "# H5Web(\"em_om.case3e.nxs\")\n", - "# H5Web(\"em_om.case4.nxs\")" + "H5Web(\"em_om.case2.nxs\")\n", + "H5Web(\"em_om.case3e.nxs\")\n", + "H5Web(\"em_om.case4.nxs\")" ] }, { diff --git a/examples/em_om/eln_data_em_om.yaml b/examples/em_om/eln_data_em_om.yaml index ec12320c5..383d15d2a 100644 --- a/examples/em_om/eln_data_em_om.yaml +++ b/examples/em_om/eln_data_em_om.yaml @@ -1,56 +1,55 @@ -ElectronBackscatterDiffraction: - entry: - attr_version: nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696 - definition: NXem_ebsd - workflow_identifier: test_id - workflow_description: test_description - start_time: 2023-02-04T14:43:00.000Z - end_time: 2023-02-04T14:43:00.000Z - program: nexusutils/dataconverter/readers/em_om.py - program__attr_version: undefined - user: - - name: Markus Kühbach - commercial_on_the_fly_indexing: - program: TSL EDAX - program__attr_version: v6.0 - results_file: test.ang - results_file__attr_version: n/a - measurement: - origin: undefined - origin__attr_version: should be collected if possible automatically by parser or RDMS - path: undefined - calibration: - origin: undefined - origin__attr_version: should be collected if possible automatically by parser or RDMS - path: undefined - rotation_conventions: - three_dimensional_rotation_handedness: counter_clockwise - rotation_convention: passive - euler_angle_convention: zxz - axis_angle_convention: 
rotation_angle_on_interval_zero_to_pi - sign_convention: p_minus_one - processing_reference_frame: - reference_frame_type: right_handed_cartesian - xaxis_direction: east - xaxis_alias: rolling direction (RD) - yaxis_direction: south - yaxis_alias: transverse direction (TD) - zaxis_direction: undefined - zaxis_alias: normal direction (ND) - origin: front, top, left - sample_reference_frame: - reference_frame_type: right_handed_cartesian - xaxis_direction: east - yaxis_direction: south - zaxis_direction: undefined - origin: front, top, left - detector_reference_frame: - reference_frame_type: right_handed_cartesian - xaxis_direction: east - yaxis_direction: south - zaxis_direction: undefined - origin: front, top, left - # not sure where this will be placed +entry: + attr_version: nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696 + definition: NXem_ebsd + workflow_identifier: test_id + workflow_description: test_description + start_time: 2023-02-04T14:43:00.000Z + end_time: 2023-02-04T14:43:00.000Z + program: nexusutils/dataconverter/readers/em_om.py + program__attr_version: undefined +user: +- name: Markus Kühbach +commercial_on_the_fly_indexing: + program: TSL EDAX + program__attr_version: v6.0 + results_file: test.ang + results_file__attr_version: n/a +measurement: + origin: undefined + origin__attr_version: should be collected if possible automatically by parser or RDMS + path: undefined +calibration: + origin: undefined + origin__attr_version: should be collected if possible automatically by parser or RDMS + path: undefined +rotation_conventions: + three_dimensional_rotation_handedness: counter_clockwise + rotation_convention: passive + euler_angle_convention: zxz + axis_angle_convention: rotation_angle_on_interval_zero_to_pi + sign_convention: p_minus_one +processing_reference_frame: + reference_frame_type: right_handed_cartesian + xaxis_direction: east + xaxis_alias: rolling direction (RD) + yaxis_direction: south + yaxis_alias: 
transverse direction (TD) + zaxis_direction: undefined + zaxis_alias: normal direction (ND) + origin: front_top_left +sample_reference_frame: + reference_frame_type: right_handed_cartesian + xaxis_direction: east + yaxis_direction: south + zaxis_direction: undefined + origin: front_top_left +detector_reference_frame: + reference_frame_type: right_handed_cartesian + xaxis_direction: east + yaxis_direction: south + zaxis_direction: undefined + origin: front_top_left +gnomonic_projection: gnomonic_projection_reference_frame: reference_frame_type: right_handed_cartesian xaxis_direction: east @@ -62,5 +61,5 @@ ElectronBackscatterDiffraction: xaxis_normalization_direction: east yaxis_boundary_convention: top yaxis_normalization_direction: south - indexing: - method: hough_transform +indexing: + method: hough_transform diff --git a/examples/em_spctrscpy/Write.NXem.Example.1.ipynb b/examples/em_spctrscpy/Write.NXem.Example.1.ipynb index 186674822..61b0f33d3 100644 --- a/examples/em_spctrscpy/Write.NXem.Example.1.ipynb +++ b/examples/em_spctrscpy/Write.NXem.Example.1.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Using dataconverter/em_spctrscpy for mapping EDX(S) and EELS spectroscopy as well as electron microscopy images to NeXus/NXem" + "## Using dataconverter/em_spctrscpy for mapping EDX(S) and EELS spectroscopy as well as electron microscopy images to NeXus/HDF5/NXem" ] }, { @@ -70,7 +70,7 @@ "tags": [] }, "source": [ - "Example data can be found on Zenodo http://dx.doi.org/10.5281/zenodo.7050774." + "Example data can be found on Zenodo https://zenodo.org/record/7908429." ] }, { @@ -81,7 +81,7 @@ }, "outputs": [], "source": [ - "import shutil # unpacks in current path unless an additional path argument is provided" + "import zipfile as zp" ] }, { @@ -92,8 +92,16 @@ }, "outputs": [], "source": [ - "! 
cd $PWD && curl --output em-spctrscpy-sprint9-example.zip https://zenodo.org/record/7050774/files/em-spctrscpy-sprint9-example.zip\n", - "shutil.unpack_archive(\"em-spctrscpy-sprint9-example.zip\")" + "! curl --output EM.Various.Datasets.1.zip https://zenodo.org/record/7908429/files/EM.Various.Datasets.1.zip" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "zp.ZipFile(\"EM.Various.Datasets.1.zip\").extractall(path=\"\", members=None, pwd=None)" ] }, { diff --git a/pynxtools/dataconverter/convert.py b/pynxtools/dataconverter/convert.py index 64b9985fb..ca3cc472d 100644 --- a/pynxtools/dataconverter/convert.py +++ b/pynxtools/dataconverter/convert.py @@ -75,7 +75,9 @@ def convert(input_file: Tuple[str], # Reading in the NXDL and generating a template definitions_path = nexus.get_nexus_definitions_path() if nxdl == "NXtest": - nxdl_path = os.path.join("tests", "data", "dataconverter", "NXtest.nxdl.xml") + nxdl_path = os.path.join( + f"{os.path.abspath(os.path.dirname(__file__))}/../../", + "tests", "data", "dataconverter", "NXtest.nxdl.xml") elif nxdl == "NXroot": nxdl_path = os.path.join(definitions_path, "base_classes", "NXroot.nxdl.xml") else: diff --git a/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py b/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py index ad53ae808..41677a1eb 100644 --- a/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py +++ b/pynxtools/dataconverter/readers/apm/utils/apm_generic_eln_io.py @@ -50,12 +50,15 @@ class NxApmNomadOasisElnSchemaParser: # pylint: disable=too-few-public-methods """ def __init__(self, file_name: str, entry_id: int): - self.entry_id = entry_id - if file_name.startswith("eln_data") and entry_id > 0: + print(f"Extracting data from ELN file: {file_name}") + if (file_name.rsplit('/', 1)[-1].startswith("eln_data") + or file_name.startswith("eln_data")) and entry_id > 0: + self.entry_id = entry_id self.file_name = 
file_name with open(self.file_name, "r", encoding="utf-8") as stream: self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter=":") else: + self.entry_id = 1 self.file_name = "" self.yml = {} diff --git a/pynxtools/dataconverter/readers/em_nion/README.md b/pynxtools/dataconverter/readers/em_nion/README.md new file mode 100644 index 000000000..648e1d919 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/README.md @@ -0,0 +1,41 @@ +# em_nion reader + +## Purpose +Parse convertable content from data and metadata inside an nionswift project to NXem. + +## Input required +* eln_data.yaml file, contextualizing information entered via ELN (user, sample) +* *.nszip, rename *.zip file with the nionswift *.nsproj project file and its data objects + renamed from zip to nszip to avoid that it gets unpacked during the upload + +## Configuration files +For nionswift most data objects are not necessarily linked to concepts available in NeXus. +Internally, nionswift organizes data and metadata as so-called display_items. These can be +thought of as smart objects which have their data and metadata surplus a uuid, a creation +time and a (last) modification time whereby the place in the object hierarchy documented by +the nsproj tree is defined. +These display_items can be images captured during a microscope session, they can be processed +data within nionswift, images, spectra, or even higher-dimensional objects which wrap +n-dimensional numpy arrays. +There is no direct conceptualization in nionswift what an object bests represents as a +concept, is the object representing an image with metadata or an EELS spectrum, or an +omega-q mapping. +Therefore, we use configuration files whereby the rules are implemented how the +em_nion parsers decides which metadata have to be offered in a particular formatted way +by the object so that it qualifies to represent an instance of a respective NeXus concept +offered via NXem, e.g. NXimage_set, NXspectrum_set, NXhyperstack_set. 
+The configuration files are used to reduce the amount of hard-coded information transfer +to a minimum. But given that different concepts demand different types of e.g. dimension +scale axes to resolve to useful/sensible instance of such concept some concept-specific +parts of the reader are hardcoded. + +## Output +The em_nion reader returns an instance of an NXem NeXus HDF5 file where data for +recognized concepts are mapped with the data and some of their metadata. + +## TODO +* Add example and describe +* Remove the focus of the README.md on the NOMAD OASIS product + +## Contact person for this reader +Markus Kühbach \ No newline at end of file diff --git a/pynxtools/dataconverter/readers/em_nion/__init__.py b/pynxtools/dataconverter/readers/em_nion/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/README.md b/pynxtools/dataconverter/readers/em_nion/concepts/README.md new file mode 100644 index 000000000..113f7d7e7 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/README.md @@ -0,0 +1,12 @@ +## Scope + +Here we store the json dictionaries whereby the parser identifies if a certain +nionswift display_item can be mapped to an available logical NeXus concept. + +The dictionary is very similar to the approach taken by the jsonmap_reader. +However the order of the dictionary is reversed. Namely, the keys of the dictionary +identify specific names in the json metadata dictionary representation of nionswift. +The left (value) part of the dictionary is a tuple of a string giving the corresponding +variadic (template) path in NXem onto which a value is mapped, the second value in the +tuple is numpy.dtype, if that value is str only then there is a third entry +in the tuple which identifies the expected dimensionality of the data. 
diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/generic_eln_mapping.py b/pynxtools/dataconverter/readers/em_nion/concepts/generic_eln_mapping.py new file mode 100644 index 000000000..b6af59467 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/generic_eln_mapping.py @@ -0,0 +1,99 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Dict mapping custom schema instances from eln_data.yaml file on concepts in NXem.""" + +# pylint: disable=no-member,line-too-long + +# releasing line-too-long restriction to avoid having line breaks in the mapping table +# made the experience that when having a widescreen working with the mapping table +# as single-line instructions is more convenient to read and parsable by human eye + +# there are several issues with the current design of how data from the eln_data.yaml are passed +# some quantities from a custom schema instance end up as list of dictionaries, like here +# aperture and user, however in this mapping approach they would demand concept-specific +# modifiers to be picked up by, currently the em_spctrscpy, em_om, apm examples individually +# parse relevant quantities for each section which makes the code difficult to read and +# unnecessarily lengthy, with em_nion we would like to test if instead we can use +# a set of mapping tables whereby to read content from a custom schema 
# results file directly into the template which the em_nion reader has to fill and
# then pass on to the data converter

# NOTE(review): a Python dict cannot hold duplicate keys, so of all entries written
# with the literal key "IGNORE" only the last one survives at runtime; the repeated
# "IGNORE" lines serve purely as in-source documentation of which fields of the
# eln_data.yaml instance are deliberately not mapped onto NXem.

# Mapping of flattened eln_data.yaml keys onto variadic NXem template paths.
# Each value is an instruction dict: "fun" names the transfer function,
# "terms" names the source key(s) in the flattened ELN dictionary.
NxEmElnInput = {
    "IGNORE": {"fun": "load_from_dict_list", "terms": "em_lab/detector"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/ebeam_column/aberration_correction/applied"},
    "IGNORE": {"fun": "load_from_dict_list", "terms": "em_lab/ebeam_column/aperture_em"},
    "/ENTRY[entry*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/emitter_type": {"fun": "load_from", "terms": "em_lab/ebeam_column/electron_source/emitter_type"},
    "/ENTRY[entry*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage/@units": {"fun": "load_from", "terms": "em_lab/ebeam_column/electron_source/voltage/unit"},
    "/ENTRY[entry*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage": {"fun": "load_from", "terms": "em_lab/ebeam_column/electron_source/voltage/value"},
    "/ENTRY[entry*]/em_lab/FABRICATION[fabrication]/capabilities": {"fun": "load_from", "terms": "em_lab/fabrication/capabilities"},
    "/ENTRY[entry*]/em_lab/FABRICATION[fabrication]/identifier": {"fun": "load_from", "terms": "em_lab/fabrication/identifier"},
    "/ENTRY[entry*]/em_lab/FABRICATION[fabrication]/model": {"fun": "load_from", "terms": "em_lab/fabrication/model"},
    "/ENTRY[entry*]/em_lab/FABRICATION[fabrication]/vendor": {"fun": "load_from", "terms": "em_lab/fabrication/vendor"},
    "/ENTRY[entry*]/em_lab/instrument_name": {"fun": "load_from", "terms": "em_lab/instrument_name"},
    "/ENTRY[entry*]/em_lab/location": {"fun": "load_from", "terms": "em_lab/location"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/beam_current/unit"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/beam_current/value"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/beam_current_description"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/magnification"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/semi_convergence_angle/unit"},
    "IGNORE": {"fun": "load_from", "terms": "em_lab/optical_system_em/semi_convergence_angle/value"},
    "/ENTRY[entry*]/em_lab/stage_lab/description": {"fun": "load_from", "terms": "em_lab/stage_lab/description"},
    "/ENTRY[entry*]/em_lab/stage_lab/name": {"fun": "load_from", "terms": "em_lab/stage_lab/name"},
    "/ENTRY[entry*]/@version": {"fun": "load_from", "terms": "entry/attr_version"},
    "/ENTRY[entry*]/definition": {"fun": "load_from", "terms": "entry/definition"},
    "/ENTRY[entry*]/end_time": {"fun": "load_from", "terms": "entry/end_time"},
    "/ENTRY[entry*]/experiment_description": {"fun": "load_from", "terms": "entry/experiment_description"},
    "/ENTRY[entry*]/experiment_identifier": {"fun": "load_from", "terms": "entry/experiment_identifier"},
    "/ENTRY[entry*]/PROGRAM[program*]/program": {"fun": "load_from", "terms": "entry/program"},
    "/ENTRY[entry*]/PROGRAM[program*]/program/@version": {"fun": "load_from", "terms": "entry/program__attr_version"},
    "/ENTRY[entry*]/start_time": {"fun": "load_from", "terms": "entry/start_time"},
    "IGNORE": {"fun": "load_from_list_of_dict", "terms": "user"},
}

# NeXus concept specific mapping tables which require special treatment as the current
# NOMAD OASIS custom schema implementation delivers them as a list of dictionaries instead
# of a directly flattenable list of keyword, value pairs

NxApertureEmFromListOfDict = {
    "/ENTRY[entry*]/em_lab/EBEAM_COLUMN[ebeam_column]/APERTURE_EM[aperture_em*]/name": {"fun": "load_from", "terms": "name"},
    "/ENTRY[entry*]/em_lab/EBEAM_COLUMN[ebeam_column]/APERTURE_EM[aperture_em*]/value": {"fun": "load_from", "terms": "value"},
}

NxUserFromListOfDict = {
    "/ENTRY[entry*]/USER[user*]/name": {"fun": "load_from", "terms": "name"},
    "/ENTRY[entry*]/USER[user*]/affiliation": {"fun": "load_from", "terms": "affiliation"},
    "/ENTRY[entry*]/USER[user*]/address": {"fun": "load_from", "terms": "address"},
    "/ENTRY[entry*]/USER[user*]/email": {"fun": "load_from", "terms": "email"},
    "/ENTRY[entry*]/USER[user*]/orcid": {"fun": "load_from", "terms": "orcid"},
    "/ENTRY[entry*]/USER[user*]/orcid_platform": {"fun": "load_from", "terms": "orcid_platform"},
    "/ENTRY[entry*]/USER[user*]/telephone_number": {"fun": "load_from", "terms": "telephone_number"},
    "/ENTRY[entry*]/USER[user*]/role": {"fun": "load_from", "terms": "role"},
    "/ENTRY[entry*]/USER[user*]/social_media_name": {"fun": "load_from", "terms": "social_media_name"},
    "/ENTRY[entry*]/USER[user*]/social_media_platform": {"fun": "load_from", "terms": "social_media_platform"},
}

NxDetectorListOfDict = {
    "/ENTRY[entry*]/em_lab/DETECTOR[detector*]/local_name": {"fun": "load_from", "terms": "local_name"},
}

# atom_types is a good example for specific cases where one cannot just blindly map
# the list that comes from the custom schema ELN instance, because
# people may enter invalid types of atoms (which would generate problems in NOMAD OASIS)
# and for NeXus we would like to have a "string of a comma-separated list of element names"

NxSample = {
    "IGNORE": {"fun": "load_from", "terms": "sample/atom_types"},
    "/ENTRY[entry*]/sample/description": {"fun": "load_from", "terms": "sample/description"},
    "/ENTRY[entry*]/sample/method": {"fun": "load_from", "terms": "sample/method"},
    "/ENTRY[entry*]/sample/name": {"fun": "load_from", "terms": "sample/name"},
    "/ENTRY[entry*]/sample/preparation_date": {"fun": "load_from", "terms": "sample/preparation_date"},
    "/ENTRY[entry*]/sample/sample_history": {"fun": "load_from", "terms": "sample/sample_history"},
    "/ENTRY[entry*]/sample/short_title": {"fun": "load_from", "terms": "sample/short_title"},
    "/ENTRY[entry*]/sample/thickness": {"fun": "load_from", "terms": "sample/thickness/value"},
    "/ENTRY[entry*]/sample/thickness/@units": {"fun": "load_from", "terms": "sample/thickness/unit"},
}
a/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_ang_space.py b/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_ang_space.py new file mode 100644 index 000000000..18162269a --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_ang_space.py @@ -0,0 +1,85 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""The constraints defining if a swift display_item is assumed a NxImageAngSpace concept.""" + +# pylint: disable=no-member,line-too-long + +# AngSpace, i.e. AngularSpace is not Reciprocal space, recall the following +# RealSpace unit category for dimension scale axes is (NX_)LENGTH +# AngSpace unit category for dimension scale axes is (NX_)ANGLE +# ReciSpace unit category for dimension scale axis is ONE_OVER_LENGTH (not in NeXus yet...) 
# releasing line-too-long restriction to avoid having line breaks in the mapping table
# made the experience that when having a widescreen working with the mapping table
# as single-line instructions is more convenient to read and parsable by human eye

# Mapping table deciding which (meta)data of a swift display_item qualify it as an
# instance of NxImageAngSpace and onto which NXem template paths they are copied.
# Keys are variadic NXem template paths or the literal "IGNORE" for source fields
# that are deliberately skipped; since a Python dict deduplicates keys, only the
# last "IGNORE" entry survives at runtime -- the repeats are documentation only.
# Values are either an instruction dict {"fun": ..., "terms": ...} or a plain
# string, the latter being a constant (e.g. a unit symbol) written verbatim.
#
# fix(review): the unit constants for exposure_time ("s") and defocus ("m")
# previously reused the key of the data mapping itself, thereby silently
# overwriting the "load_from" instruction; they now target the .../@units key,
# consistent with e.g. .../electron_source/voltage/@units below.
NxImageAngSpaceDict = {
    "IGNORE": {"fun": "load_from", "terms": "type"},
    "IGNORE": {"fun": "load_from", "terms": "uuid"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/start_time": {"fun": "convert_iso8601", "terms": ["created", "timezone"]},
    "IGNORE": {"fun": "load_from", "terms": "is_sequence"},
    "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/offset"},
    "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/scale"},
    "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/units"},
    "IGNORE": {"fun": "load_from", "terms": "dimensional_calibrations"},
    "IGNORE": {"fun": "load_from", "terms": "timezone_offset"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/binning": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:Binning"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/dark_mode": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:DarkMode"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/exposure_time": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:ExposureTime(s)"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/exposure_time/@units": "s",
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/gain_mode": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:GainMode"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/flipped": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:IsFlipped"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/readout_tlbr": {"fun": "load_from", "terms": ["metadata/hardware_source/autostem/Acquisition:ReadOutTop", "metadata/hardware_source/autostem/Acquisition:ReadOutLeft", "metadata/hardware_source/autostem/Acquisition:ReadOutBottom", "metadata/hardware_source/autostem/Acquisition:ReadOutRight"]},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/PROCESS[process]/normalization": {"fun": "load_from", "terms": "metadata/hardware_source/autostem/Acquisition:ValueNormalization"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/source"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/timestamp"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/sensor_dimensions_hw"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/sensor_readout_area_tlbr"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/is_flipped_horizontally"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/is_gain_corrected"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/is_dark_subtracted"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/frame_number"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/time_point_ns"},
    "IGNORE": "ns",
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/integration_count"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/counts_per_electron"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE[ronchicam]/detector_identifier": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_id"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_name"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/exposure"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/binning"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/signal_type"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/valid_rows"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/frame_index"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/channel_index"},
    "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/reference_key"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage": {"fun": "load_from", "terms": "metadata/instrument/high_tension"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage/@units": "V",
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/defocus": {"fun": "load_from", "terms": "metadata/instrument/defocus"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/defocus/@units": "m",
    "IGNORE": {"fun": "load_from", "terms": "title"},
    "IGNORE": {"fun": "load_from", "terms": "session_id"},
    "IGNORE": {"fun": "load_from", "terms": "session"},
    "IGNORE": {"fun": "load_from", "terms": "category"},
    "IGNORE": {"fun": "load_from", "terms": "version"},
    "IGNORE": {"fun": "load_from", "terms": "modified"},
    "IGNORE": {"fun": "load_from", "terms": "data_shape"},
    "IGNORE": {"fun": "load_from", "terms": "data_dtype"},
    "IGNORE": {"fun": "load_from", "terms": "collection_dimension_count"},
    "IGNORE": {"fun": "load_from", "terms": "datum_dimension_count"},
    "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/end_time": {"fun": "convert_iso8601", "terms": ["data_modified", "timezone"]},
    "IGNORE": {"fun": "load_from", "terms": "__large_format"},
}
b/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_real_space.py new file mode 100644 index 000000000..c3ede2142 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/nx_image_real_space.py @@ -0,0 +1,201 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""The constraints defining if a swift display_item is assumed a NxImageRealSpace concept.""" + +# pylint: disable=no-member,line-too-long + +# releasing line-too-long restriction to avoid having line breaks in the mapping table +# made the experience that when having a widescreen working with the mapping table +# as single-line instructions is more convenient to read and parsable by human eye + +# current issues: +# hard-coding currently the ebeam deflector is problematic as it is +# to just make the name variadic but then assume there is always only one +# in the specific case of a Nion microscope and which information is available typically +# via swift there one would get information about the scanbox for which the closest +# match is NXebeam_deflector and there is only one scanbox at an Nion Hermes 200 +# but one could customize this implementation here and instead resolve how the scanbox +# is composed of individual groups of deflection coils in which case one could also +# think about using one NXebeam_deflector for every single coil... 
+ +NxImageRealSpaceDict = {"IGNORE": {"fun": "load_from", "terms": "type"}, + "IGNORE": {"fun": "load_from", "terms": "uuid"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/start_time": {"fun": "convert_iso8601", "terms": ["created", "timezone"]}, + "IGNORE": {"fun": "load_from", "terms": "is_sequence"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/offset"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/scale"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/units"}, + "IGNORE": {"fun": "load_from", "terms": "dimensional_calibrations"}, + "IGNORE": {"fun": "load_from", "terms": "timezone"}, + "IGNORE": {"fun": "load_from", "terms": "timezone_offset"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/high_tension"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/defocus"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/EHT"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/electron_source/voltage/@units": "V", + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMTBF_gain"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMTDF_gain"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/tilt1": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutA"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/tilt1/@units": "deg", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/tilt2": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutB"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/tilt2/@units": "deg", + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/position": {"fun": "load_from", "terms": ["metadata/instrument/ImageScanned/StageOutX", "metadata/instrument/ImageScanned/StageOutY", "metadata/instrument/ImageScanned/StageOutZ"]}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/STAGE_LAB[stage_lab]/position/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_0/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C10"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_0/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_2_a/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C12.a"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_2_a/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_2_b/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C12.b"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_1_2_b/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_1_a/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C21.a"}, + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_1_a/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_1_b/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C21.b"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_1_b/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_3_a/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C23.a"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_3_a/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_3_b/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C23.b"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_2_3_b/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_0/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C30"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_0/magnitude/@units": "m", + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_2_a/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C32.a"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_2_a/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_2_b/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C32.b"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_2_b/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_4_a/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C34.a"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_4_a/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_4_b/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C34.b"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_3_4_b/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_5_0/magnitude": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C50"}, + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/aberration_correction/ZEMLIN_TABLEAU/PROCESS[process]/nion/c_5_0/magnitude/@units": "m", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em1]/value": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C1 ConstW"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em1]/name": "C1", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em2]/value": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C2 ConstW"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em2]/name": "C2", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em3]/value": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C3 ConstW"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_COLUMN[ebeam_column]/LENS_EM[lens_em3]/name": "C3", + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMT2_gain"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/SuperFEG.^EmissionCurrent"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/G_2Db"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/LastTuneCurrent"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/semi_convergence_angle": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/probe_ha"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/semi_convergence_angle/@units": "mrad", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE_SET[image_set*]/inner_half_angle": {"fun": "load_from", 
"terms": "metadata/instrument/ImageScanned/HAADF_Inner_ha"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE_SET[image_set*]/inner_half_angle/@units": "mrad", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE_SET[image_set*]/outer_half_angle": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/HAADF_Outer_ha"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE_SET[image_set*]/outer_half_angle/@units": "mrad", + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/GeometricProbeSize"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/hardware_source_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/hardware_source_name"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_id"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/center": {"fun": "load_from", "terms": ["metadata/scan/center_x_nm", "metadata/scan/center_y_nm"]}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/center/@units": "nm", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/field_of_view": {"fun": "load_from", "terms": "metadata/scan/fov_nm"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/OPTICAL_SYSTEM_EM[optical_system_em]/field_of_view/@units": "nm", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/rotation": {"fun": "load_from", "terms": "metadata/scan/rotation"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/rotation/@units": "deg", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/rotation_deg"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_context_size"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/subscan_fractional_size"}, + "IGNORE": {"fun": "load_from", 
"terms": "metadata/scan/scan_size"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/subscan_fractional_center"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/center_nm"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/pixel_time_us"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/fov_nm"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/rotation_rad"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/external_clock_wait_time": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_clock_wait_time_ms"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/external_clock_wait_time": "ms", + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/external_clock_mode": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_clock_mode"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/external_scan_mode": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_scan_mode"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/external_scan_ratio": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_scan_ratio"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/ac_line_sync"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/ac_frame_sync": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/ac_frame_sync"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/flyback_time_us"}, + "UNCLEAR": 
{"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/subscan_pixel_size"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/subscan_fractional_size"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/subscan_fractional_center"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/top_left_override"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/data_shape_override"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/state_override"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/section_rect"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/scan_id"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/ac_line_sync": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/ac_line_sync"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/calibration_style": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/calibration_style"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/center_x_nm"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/center_y_nm"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/flyback_time": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/flyback_time_us"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/flyback_time/@units": "µs", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/fov_nm"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/line_time": {"fun": "load_from", "terms": 
"metadata/scan/scan_device_properties/line_time_us"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/line_time/@units": "µs", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/pixel_time_us"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/pixels_x"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/pixels_y"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/pixel_time_target": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/requested_pixel_time_us"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/pixel_time_target/@units": "µs", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/rotation_deg"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/rotation_rad"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac1]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 0"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac2]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 1"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac3]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 2"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac4]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 3"}, + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac5]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 4"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac6]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 5"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac7]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 6"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac8]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 7"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac9]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 8"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac10]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 9"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac11]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 10"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/DAC[dac12]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 DAC 11"}, + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board1]/relay": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 0 Relay"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac1]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 0"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac2]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 1"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac3]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 2"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac4]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 3"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac5]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 4"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac6]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 5"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac7]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 6"}, + 
"/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac8]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 7"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac9]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 8"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac10]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 9"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac11]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 10"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/DAC[dac12]/value": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 11"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/em_lab/EBEAM_DEFLECTOR[ebeam_deflector1]/CIRCUIT_BOARD[mag_board2]/relay": {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 Relay"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/scan/valid_rows"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/IMAGE_SET[image_set*]/detector_identifier": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_name"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/exposure"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/hardware_source/frame_index"}, + "IGNORE": {"fun": "load_from", 
"terms": "metadata/hardware_source/channel_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/channel_name"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/pixel_time_us"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/line_time_us"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/hardware_source/valid_rows"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/hardware_source/channel_index"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/hardware_source/reference_key"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/hardware_source/view_id"}, + "IGNORE": {"fun": "load_from", "terms": "title"}, + "IGNORE": {"fun": "load_from", "terms": "session_id"}, + "IGNORE": {"fun": "load_from", "terms": "session"}, + "IGNORE": {"fun": "load_from", "terms": "category"}, + "IGNORE": {"fun": "load_from", "terms": "version"}, + "IGNORE": {"fun": "load_from", "terms": "modified"}, + "IGNORE": {"fun": "load_from", "terms": "data_shape"}, + "IGNORE": {"fun": "load_from", "terms": "data_dtype"}, + "IGNORE": {"fun": "load_from", "terms": "collection_dimension_count"}, + "IGNORE": {"fun": "load_from", "terms": "datum_dimension_count"}, + "/ENTRY[entry*]/measurement/EVENT_DATA_EM[event_data_em*]/end_time": {"fun": "convert_iso8601", "terms": ["data_modified", "timezone"]}} diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/nx_spectrum_eels.py b/pynxtools/dataconverter/readers/em_nion/concepts/nx_spectrum_eels.py new file mode 100644 index 000000000..eed0efe7e --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/nx_spectrum_eels.py @@ -0,0 +1,232 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""The constraints defining if a swift display_item is assumed a NxSpectrumSetEels concept.""" + +# pylint: disable=no-member,line-too-long + +# releasing line-too-long restriction to avoid having line breaks in the mapping table +# made the experience that when having a widescreen working with the mapping table +# as single-line instructions is more convenient to read and parsable by human eye + + +NxSpectrumEels = {"IGNORE": {"fun": "load_from", "terms": "type"}, + "IGNORE": {"fun": "load_from", "terms": "uuid"}, + "IGNORE": {"fun": "convert_iso8601", "terms": ["created", "timezone"]}, + "IGNORE": {"fun": "load_from", "terms": "is_sequence"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/offset"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/scale"}, + "IGNORE": {"fun": "load_from", "terms": "intensity_calibration/units"}, + "IGNORE": {"fun": "load_from", "terms": "dimensional_calibrations"}, + "IGNORE": {"fun": "load_from", "terms": "timezone_offset"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/header_info/header_detail"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/header_info/htype"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/header_info/series"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/auto_summation"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/beam_center_x"}, + "IGNORE": {"fun": "load_from", "terms": 
"metadata/hardware_source/detector_configuration/beam_center_y"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/bit_depth_image"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/bit_depth_readout"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/chi_increment"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/chi_start"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/compression"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/count_time"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/countrate_correction_applied"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/countrate_correction_count_cutoff"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/data_collection_date"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/description"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/detector_distance"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/detector_number"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/detector_readout_time"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/detector_translation"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/eiger_fw_version"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/element"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/flatfield_correction_applied"}, + "IGNORE": {"fun": 
"load_from", "terms": "metadata/hardware_source/detector_configuration/frame_count_time"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/frame_period"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/frame_time"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/kappa_increment"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/kappa_start"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/nimages"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/ntrigger"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/number_of_excluded_pixels"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/omega_increment"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/omega_start"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/phi_increment"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/phi_start"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/photon_energy"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/pixel_mask_applied"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/roi_mode"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/sensor_material"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/sensor_thickness"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/software_version"}, + "IGNORE": {"fun": "load_from", "terms": 
"metadata/hardware_source/detector_configuration/threshold_energy"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/trigger_mode"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/two_theta_increment"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/two_theta_start"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/virtual_pixel_correction_applied"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/wavelength"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/x_pixel_size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/x_pixels_in_detector"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/y_pixel_size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/detector_configuration/y_pixels_in_detector"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/bad_pixels"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/processing"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/flip_l_r"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/binning"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/chip_size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/sensor_dimensions"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/readout_area"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/countrate_correction_cutoff"}, + "IGNORE": 
{"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/interpolate_racetracks"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/mark_saturated_pixels"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/apply_countrate_correction"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/countrate_correction_factor"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/apply_gain_correction"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/camera_processing_parameters/always_interpolate_racetracks"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/high_tension"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/defocus"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/EHT"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/MajorOL"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/StageOutA"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/StageOutB"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/StageOutX"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/StageOutY"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/StageOutZ"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/probe_ha"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/SuperFEG.^EmissionCurrent"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/LastTuneCurrent"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C10"}, + "IGNORE": {"fun": 
"load_from", "terms": "metadata/hardware_source/ImageRonchigram/C12.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C12.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C21.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C21.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C23.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C23.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C30"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C32.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C32.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C34.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C34.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/ImageRonchigram/C50"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/hardware_source_name"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/exposure"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/binning"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/hardware_source/signal_type"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/hardware_source_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/hardware_source_name"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_id"}, + "IGNORE": {"fun": "load_from", "terms": ["metadata/scan/center_x_nm", "metadata/scan/center_y_nm"]}, + "IGNORE": "nm", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/fov_nm"}, + "IGNORE": {"fun": "load_from", "terms": 
"metadata/scan/rotation"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/rotation_deg"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_context_size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/size"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/center_nm"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/pixel_time_us"}, + "IGNORE": "µs", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/fov_nm"}, + "IGNORE": "nm", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/rotation_rad"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_clock_wait_time_ms"}, + "IGNORE": "ms", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_clock_mode"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_scan_mode"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/external_scan_ratio"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/ac_line_sync"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/ac_frame_sync"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/flyback_time_us"}, + "IGNORE": "µs", + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/scan_device_parameters/scan_id"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/scan/valid_rows"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/high_tension"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/defocus"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/EHT"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMTBF_gain"}, + "IGNORE": 
{"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMTDF_gain"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutA"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutB"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutX"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutY"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/StageOutZ"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C10"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C12.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C12.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C21.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C21.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C23.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C23.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C30"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C32.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C32.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C34.a"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C34.b"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C50"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C1 ConstW"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C2 ConstW"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/C3 ConstW"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/PMT2_gain"}, + "IGNORE": {"fun": 
"load_from", "terms": "metadata/instrument/ImageScanned/SuperFEG.^EmissionCurrent"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/G_2Db"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/LastTuneCurrent"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/probe_ha"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/HAADF_Inner_ha"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/HAADF_Outer_ha"}, + "IGNORE": {"fun": "load_from", "terms": "metadata/instrument/ImageScanned/GeometricProbeSize"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/x_shifter"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/blanker"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/x_shift_delay"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/focus"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/focus_delay"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/auto_dark_subtract"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/processing"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/blanker_delay"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/sum_frames"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/camera_hardware_source_id"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/use_multi_eels_calibration"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/shift_each_sequence_slice"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/y_shifter"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/x_units_per_ev"}, + "UNCLEAR": "eV", + "UNCLEAR": {"fun": "load_from", 
"terms": "metadata/MultiAcquire.settings/y_units_per_px"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/y_shift_delay"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/saturation_value"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/y_align"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.settings/stitch_spectra"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.parameters/index"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.parameters/offset_x"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.parameters/offset_y"}, + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.parameters/exposure_ms"}, + "UNCLEAR": "ms", + "UNCLEAR": {"fun": "load_from", "terms": "metadata/MultiAcquire.parameters/frames"}, + "IGNORE": {"fun": "load_from", "terms": "title"}, + "IGNORE": {"fun": "load_from", "terms": "session_id"}, + "IGNORE": {"fun": "load_from", "terms": "session"}, + "IGNORE": {"fun": "load_from", "terms": "category"}, + "IGNORE": {"fun": "load_from", "terms": "version"}, + "IGNORE": {"fun": "load_from", "terms": "modified"}, + "IGNORE": {"fun": "load_from", "terms": "data_shape"}, + "IGNORE": {"fun": "load_from", "terms": "data_dtype"}, + "IGNORE": {"fun": "load_from", "terms": "collection_dimension_count"}, + "IGNORE": {"fun": "load_from", "terms": "datum_dimension_count"}, + "IGNORE": {"fun": "convert_iso8601", "terms": ["data_modified", "timezone"]}, + "UNCLEAR": {"fun": "load_from", "terms": "session/site"}} + +# {"fun": "convert_iso8601", "terms": ["created", "timezone"]} diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/swift_display_items_to_nx_concepts.py b/pynxtools/dataconverter/readers/em_nion/concepts/swift_display_items_to_nx_concepts.py new file mode 100644 index 000000000..39c02051f --- /dev/null +++ 
b/pynxtools/dataconverter/readers/em_nion/concepts/swift_display_items_to_nx_concepts.py @@ -0,0 +1,100 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Utility for analyzing swift data/metadata of display_items to identify NeXus concepts.""" + +# pylint: disable=no-member + +import flatdict as fd + + +metadata_constraints = {"type": str, + "uuid": str, + "created": str, + "data_shape": list, + "data_dtype": str, + "is_sequence": bool, + "dimensional_calibrations": list, + "data_modified": str, + "timezone": str, + "timezone_offset": str, + "metadata/hardware_source/hardware_source_id": str, + "version": int, + "modified": str} + +nexus_concept_dict = {"ITULL": "NxImageSetRealSpace", + "IFLL": "NxImageSetRealSpace", + "IFL": None, + "ITUL": None, + "STUUE": "NxSpectrumSetEelsOmegaQ", + "STULLE": "NxSpectrumSetEels", + "STULLUE": "NxSpectrumSetOmegaQ", + "SFLLUE": "NxSpectrumSetOmegaQ", + "SFLLE": "NxSpectrumSetEels", + "SFUE": "NxSpectrumSetEelsOmegaQ", + "RFAA": "NxImageAngSpace", + "RTUAA": "NxImageAngSpace"} + + +def check_existence_of_required_fields(dct: dict, constraint_dct: dict) -> bool: + """Checks if given dictionary has fields with values which match constraints.""" + flat_dct = fd.FlatDict(dct, delimiter='/') + for keyword, dtyp in constraint_dct.items(): + if keyword not in flat_dct.keys(): + print(f"-->{keyword} not keyword") + return 
False + if not isinstance(flat_dct[keyword], dtyp): + print(f"-->{keyword} not instance") + return False + return True + + +def identify_nexus_concept_key(dct: dict) -> str: + """Identifies best candidate to map data/metadata on a NeXus concept.""" + # ##MK::imporve that we work ideally always with the flattened dictionary + nexus_concept_key = "UNKNOWN" + if check_existence_of_required_fields(dct, metadata_constraints) is False: + return nexus_concept_key + lst_unit_catg = [] + for axis_dict in dct["dimensional_calibrations"]: # inspect axes in sequence + if isinstance(axis_dict, dict): + if set(axis_dict.keys()) == set(["offset", "scale", "units"]): + unit_arg = axis_dict["units"].lower() + if unit_arg == "": + lst_unit_catg.append("U") + elif unit_arg in ["nm"]: # replace by pint to pick up on any length + lst_unit_catg.append("L") + elif unit_arg in ["ev"]: # replace by pint to pick up on any enery + lst_unit_catg.append("E") + elif unit_arg in ["rad"]: # replace by pint to pick up on angle unit + lst_unit_catg.append("A") + else: + return nexus_concept_key + set_unit_catg = set(lst_unit_catg) + + if "A" in set_unit_catg: + nexus_concept_key \ + = f"R{str(dct['is_sequence']).upper()[0:1]}{''.join(lst_unit_catg)}" + elif "E" in set_unit_catg: + nexus_concept_key \ + = f"S{str(dct['is_sequence']).upper()[0:1]}{''.join(lst_unit_catg)}" + elif "E" not in set_unit_catg: + nexus_concept_key \ + = f"I{str(dct['is_sequence']).upper()[0:1]}{''.join(lst_unit_catg)}" + else: + return nexus_concept_key + return nexus_concept_key diff --git a/pynxtools/dataconverter/readers/em_nion/concepts/swift_handle_nx_concepts.py b/pynxtools/dataconverter/readers/em_nion/concepts/swift_handle_nx_concepts.py new file mode 100644 index 000000000..6ee855b84 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/concepts/swift_handle_nx_concepts.py @@ -0,0 +1,116 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. 
def load_from_modifier(terms, fd_dct):
    """Implement modifier which reads values of different type from fd_dct.

    Args:
        terms: either a single key (str) or a list of keys (str) to read.
        fd_dct: (flattened) dictionary to read from.

    Returns:
        The value for a single key, a list of values for a list of keys,
        or None when terms is unsupported or any key is missing.
        (Previously a list with a missing key raised KeyError while a
        missing single key returned None; both now return None.)
    """
    if isinstance(terms, str):
        if terms in fd_dct.keys():
            return fd_dct[terms]
        return None
    if isinstance(terms, list) and all(isinstance(entry, str) for entry in terms):
        if all(entry in fd_dct.keys() for entry in terms):
            return [fd_dct[entry] for entry in terms]
    return None


def convert_iso8601_modifier(terms, dct: dict):
    """Implement modifier which transforms nionswift time stamps to proper UTC ISO8601.

    Args:
        terms: a two-element list [local_time_key, timezone_key] naming
            entries in dct; a single str term has no defined conversion.
        dct: dictionary holding the referenced values.

    Returns:
        A timezone-aware datetime, or None when terms cannot be interpreted.

    Raises:
        ValueError: when the referenced timezone string is not a pytz timezone.
    """
    if terms is None:
        return None
    if isinstance(terms, str):
        # NOTE(review): a single term has no conversion defined and always
        # yields None — confirm whether this branch should ever resolve.
        return None
    if (isinstance(terms, list)) and (len(terms) == 2) \
            and (all(isinstance(entry, str) for entry in terms)):
        # assume the first argument is a local time
        # assume the second argument is a timezone string
        if terms[0] in dct.keys() and terms[1] in dct.keys():
            # handle the case that these times can be arbitrarily formatted
            # for now we let ourselves be guided
            # by how time stamps are returned in Christoph Koch's
            # nionswift instances also formatting-wise
            date_time_str = dct[terms[0]].replace("T", " ")
            time_zone_str = dct[terms[1]]
            if time_zone_str not in pytz.all_timezones:
                raise ValueError('Invalid timezone string!')
            date_time_obj = datetime.strptime(date_time_str, '%Y-%m-%d %H:%M:%S.%f')
            return pytz.timezone(time_zone_str).localize(date_time_obj)
    return None


def apply_modifier(modifier, dct: dict):
    """Interpret a functional mapping using data from dct via calling modifiers.

    A modifier is either a plain string (returned verbatim), or a dict with
    keys {"fun", "terms"} dispatching to a named modifier function, or a dict
    with key {"link"} (not yet implemented). Anything else yields None.
    """
    if isinstance(modifier, dict):
        # different commands are available
        if set(["fun", "terms"]) == set(modifier.keys()):
            if modifier["fun"] == "load_from":
                return load_from_modifier(modifier["terms"], dct)
            if modifier["fun"] == "convert_iso8601":
                return convert_iso8601_modifier(modifier["terms"], dct)
        elif set(["link"]) == set(modifier.keys()):
            # CURRENTLY NOT IMPLEMENTED
            # with the jsonmap reader Sherjeel conceptualized "link"
            return None
        return None
    if isinstance(modifier, str):
        return modifier
    return None


# examples/tests how to use modifiers
# modd = "µs"
# modd = {"link": "some_link_to_somewhere"}
# modd = {"fun": "load_from", "terms": "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 11"}
# modd = {"fun": "load_from", "terms": ["metadata/scan/scan_device_properties/mag_boards/MagBoard 1 DAC 11",
#                                       "metadata/scan/scan_device_properties/mag_boards/MagBoard 1 Relay"]}
# modd = {"fun": "convert_iso8601", "terms": ["data_modified", "timezone"]}
# print(apply_modifier(modd, yml))

def variadic_path_to_specific_path(path: str, instance_identifier: list):
    """Transforms a variadic path to an actual path with instances.

    Each '*' in path is replaced, left to right, by the corresponding entry
    of instance_identifier. Returns None for empty paths or when there are
    fewer identifiers than '*' placeholders.
    """
    if path is None or path == "":
        return None
    narguments = path.count("*")
    if narguments == 0:  # path is not variadic
        return path
    if len(instance_identifier) >= narguments:
        fragments = path.split("*")
        if len(fragments) == narguments + 1:
            # interleave the path fragments with the identifiers; the former
            # manual index loop carried a dead `idx += 1` statement
            resolved = "".join(
                f"{fragment}{identifier}"
                for fragment, identifier in zip(fragments[:-1], instance_identifier))
            return resolved + fragments[-1]
    return None
+ """ + + # pylint: disable=too-few-public-methods + + # Whitelist for the NXDLs that the reader supports and can process + supported_nxdls = ["NXem"] + + # pylint: disable=duplicate-code + def read(self, + template: dict = None, + file_paths: Tuple[str] = None, + objects: Tuple[Any] = None) -> dict: + """Read data from given file, return filled template dictionary em.""" + # pylint: disable=duplicate-code + template.clear() + + # this em_om parser combines multiple sub-parsers + # so we need the following input: + # logical analysis which use case + # data input from an ELN (using an ELN-agnostic) YAML representation + # data input from technology partner files, here zipped nionswift project + # directory and file renamed from ending with zip to nszip + # functionalities for creating default plots + + entry_id = 1 + + print("Parse ELN and compressed nionswift project content...") + case = EmNionUseCaseSelector(file_paths) + if case.is_valid is False: + print("Such a combination of input-file(s, if any) is not supported !") + return {} + + print("Parse (meta)data coming from an ELN...") + if case.eln_parser_type == "generic": + eln = NxEmNionElnSchemaParser(case.eln[0], entry_id) + eln.parse(template) + else: + print("No interpretable ELN input found!") + return {} + + print("Parse (numerical) data and metadata from nionswift project...") + if case.prj_parser_type == "nionswift": + swift_parser = NxEmNionSwiftProjectParser(case.prj[0], entry_id) + swift_parser.parse(template) + else: + print("No input-file defined for technology partner data !") + + # at this point the default plots exist already + # we only need to decorate the template to point to the mandatory ROI overview + print("Create NeXus default plottable data...") + em_spctrscpy_default_plot_generator(template, entry_id) + + debugging = False + if debugging is True: + print("Reporting state of template before passing to HDF5 writing...") + for keyword in template.keys(): + print(keyword) + # 
print(type(template[keyword])) + # print(f"{keyword}, {template[keyword]}") + + print("Forward instantiated template to the NXS writer...") + return template + + +# This has to be set to allow the convert script to use this reader. +READER = EmNionReader diff --git a/pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py b/pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py new file mode 100644 index 000000000..8be648477 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/utils/em_generic_eln_io.py @@ -0,0 +1,158 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class NxEmNionElnSchemaParser:
    """Parse eln_data.yaml dump file content generated from a NOMAD OASIS YAML.

    The functionalities in this ELN YAML parser do not check if the
    instantiated template yields an instance which is compliant NXapm.
    Instead, this task is handled by the generic part of the dataconverter
    during the verification of the template dictionary.
    """

    def __init__(self, file_name: str, entry_id: int):
        # only accept files named eln_data* with a positive entry id;
        # otherwise fall back to an empty parser state (empty self.yml)
        print(f"Extracting data from ELN file: {file_name}")
        if (file_name.rsplit('/', 1)[-1].startswith("eln_data")
                or file_name.startswith("eln_data")) and entry_id > 0:
            self.entry_id = entry_id
            self.file_name = file_name
            with open(self.file_name, "r", encoding="utf-8") as stream:
                # flatten the nested YAML so sections are addressable as
                # '/'-delimited key paths, e.g. "sample/atom_types"
                self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter="/")
        else:
            self.entry_id = 1
            self.file_name = ""
            self.yml = {}

    def parse_user_section(self, template: dict) -> dict:
        """Copy data from user section into template."""
        src = "user"
        if src in self.yml.keys():
            if isinstance(self.yml[src], list):
                if (all(isinstance(entry, dict) for entry in self.yml[src]) is True):
                    user_id = 1
                    # custom schema delivers a list of dictionaries...
                    for user_dict in self.yml[src]:
                        # ... for each of them inspect for fields mappable on NeXus
                        identifier = [self.entry_id, user_id]
                        # identifier to get instance NeXus path from variadic NeXus path
                        # try to find all quantities on the left-hand side of the mapping
                        # table and check if we can find these
                        for nx_path, modifier in NxUserFromListOfDict.items():
                            if (nx_path != "IGNORE") and (nx_path != "UNCLEAR"):
                                trg = variadic_path_to_specific_path(nx_path, identifier)
                                res = apply_modifier(modifier, user_dict)
                                if res is not None:
                                    template[trg] = res
                        user_id += 1
        return template

    def parse_sample_section(self, template: dict) -> dict:
        """Copy data from sample section into template."""
        src = "sample/atom_types"
        trg = f"/ENTRY[entry{self.entry_id}]/{src}"
        if "sample/atom_types" in self.yml.keys():
            if (isinstance(self.yml[src], list)) and (len(self.yml[src]) >= 1):
                # accept the atom_types list only if every entry is a real
                # chemical symbol ("X" is the ase placeholder, not an element)
                atom_types_are_valid = True
                for symbol in self.yml[src]:
                    valid = isinstance(symbol, str) \
                        and (symbol in chemical_symbols) and (symbol != "X")
                    if valid is False:
                        atom_types_are_valid = False
                        break
                if atom_types_are_valid is True:
                    template[trg] = ", ".join(list(self.yml[src]))

        # remaining sample quantities come from the static mapping table
        for nx_path, modifier in NxSample.items():
            if (nx_path != "IGNORE") and (nx_path != "UNCLEAR"):
                trg = variadic_path_to_specific_path(nx_path, [self.entry_id])
                res = apply_modifier(modifier, self.yml)
                if res is not None:
                    template[trg] = res

        return template

    def parse_detector_section(self, template: dict) -> dict:
        """Copy data from detector section into template."""
        src = "em_lab/detector"
        if src in self.yml.keys():
            if isinstance(self.yml[src], list):
                if (all(isinstance(entry, dict) for entry in self.yml[src]) is True):
                    detector_id = 1
                    # custom schema delivers a list of dictionaries...
                    for detector_dict in self.yml[src]:
                        # ... for each of them inspect for fields mappable on NeXus
                        identifier = [self.entry_id, detector_id]
                        # identifier to get instance NeXus path from variadic NeXus path
                        # try to find all quantities on the left-hand side of the mapping
                        # table and check if we can find these
                        for nx_path, modifier in NxDetectorListOfDict.items():
                            if (nx_path != "IGNORE") and (nx_path != "UNCLEAR"):
                                trg = variadic_path_to_specific_path(nx_path, identifier)
                                res = apply_modifier(modifier, detector_dict)
                                if res is not None:
                                    template[trg] = res
                        detector_id += 1

        return template

    def parse_other_sections(self, template: dict) -> dict:
        """Copy data from custom schema (excluding user, sample) into template."""
        for nx_path, modifier in NxEmElnInput.items():
            if (nx_path != "IGNORE") and (nx_path != "UNCLEAR"):
                trg = variadic_path_to_specific_path(nx_path, [self.entry_id, 1])
                res = apply_modifier(modifier, self.yml)
                if res is not None:
                    template[trg] = res
        return template

    def parse(self, template: dict) -> dict:
        """Copy data from self into template the appdef instance."""
        # order of the section parsers is not semantically relevant; each
        # writes a disjoint set of NeXus paths into template
        self.parse_user_section(template)
        self.parse_detector_section(template)
        self.parse_sample_section(template)
        self.parse_other_sections(template)

        debugging = False
        if debugging is True:
            for keyword, value in template.items():
                print(f"{keyword}, {value}")
        return template
def get_list_of_dimension_scale_axes(dct: dict) -> list:  # , concept_key: str
    """Create a list of dimension scale axes value, unit tuples.

    Use only when we know already onto which concept a display_item will be
    mapped. For each axis the values are the calibrated pixel-center
    positions: offset + (i + 0.5) * scale for i in [0, nvalues).

    Args:
        dct: display_item metadata; must satisfy metadata_constraints and
            have one dimensional_calibrations entry per data_shape entry.

    Returns:
        List of {"value": np.float64 array, "unit": str} dicts, one per
        well-formed axis; empty list when the metadata are unusable.
    """
    axes: List[Any] = []
    if check_existence_of_required_fields(dct, metadata_constraints) is False:
        return axes
    # or concept_key not in nexus_concept_dict.keys():
    # if nexus_concept_dict[concept_key] is None:
    #     return axes
    if len(dct["dimensional_calibrations"]) != len(dct["data_shape"]):
        return axes

    # iterate shape and calibration entries in lockstep instead of the former
    # np.arange index loop; malformed entries are skipped as before
    for nvalues, axis_dict in zip(dct["data_shape"], dct["dimensional_calibrations"]):
        if isinstance(nvalues, int) and isinstance(axis_dict, dict):
            if (nvalues > 0) \
                    and (set(axis_dict.keys()) == set(["offset", "scale", "units"])):
                start = axis_dict["offset"] + 0.5 * axis_dict["scale"]
                stop = axis_dict["offset"] + ((nvalues - 1) + 0.5) * axis_dict["scale"]
                axes.append(
                    {"value": np.asarray(np.linspace(start,
                                                     stop,
                                                     num=nvalues,
                                                     endpoint=True), np.float64),
                     "unit": axis_dict["units"]})
    return axes
def encode(uuid_: uuid.UUID, alphabet: str) -> str:
    """Encode the 128-bit integer of uuid_ as little-endian digits of alphabet.

    Repeated divmod by the alphabet length emits the least-significant digit
    first, mirroring nionswift's Profile.py encoding.
    """
    digits = []
    remainder = uuid_.int
    base = len(alphabet)
    while remainder:
        remainder, digit = divmod(remainder, base)
        digits.append(alphabet[digit])
    return "".join(digits)


def uuid_to_file_name(data_item_uuid_str: str) -> str:
    """Map a nionswift data item UUID string to its on-disk file-name stem."""
    # 25 character results
    parsed = uuid.UUID(f'{data_item_uuid_str}')
    return f'data_{encode(parsed, "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890")}'
b/pynxtools/dataconverter/readers/em_nion/utils/swift_zipped_project_parser.py @@ -0,0 +1,367 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Parse display_items inside a zip file generated from compressing a swift project dir.""" + +# pylint: disable=no-member + +# from typing import Dict, Any, List, Tuple + +import yaml + +import json + +import flatdict as fd + +import nion.swift.model.NDataHandler as nsnd + +import numpy as np + +import h5py + +from zipfile37 import ZipFile + +from pynxtools.dataconverter.readers.em_nion.utils.swift_uuid_to_file_name \ + import uuid_to_file_name + +from pynxtools.dataconverter.readers.em_nion.utils.swift_dimscale_axes \ + import get_list_of_dimension_scale_axes + +from pynxtools.dataconverter.readers.em_nion.concepts.swift_display_items_to_nx_concepts \ + import nexus_concept_dict, identify_nexus_concept_key + +from pynxtools.dataconverter.readers.em_nion.concepts.swift_handle_nx_concepts \ + import apply_modifier, variadic_path_to_specific_path + +from pynxtools.dataconverter.readers.em_nion.concepts.nx_image_real_space \ + import NxImageRealSpaceDict + +from pynxtools.dataconverter.readers.em_nion.utils.versioning \ + import NX_EM_NION_SWIFT_NAME, NX_EM_NION_SWIFT_VERSION +from pynxtools.dataconverter.readers.em_nion.utils.versioning \ + import NX_EM_NION_EXEC_NAME, NX_EM_NION_EXEC_VERSION + + +class 
    def __init__(self, file_name, entry_id):
        """Class wrapping swift parser.

        file_name: path to the zipped nionswift project file.
        entry_id: NXentry instance index to write into.
        """
        self.file_name = file_name
        self.entry_id = entry_id
        # counters which keep track of how many instances of NXevent_data_em have
        # been instantiated, this implementation currently maps each display_items
        # onto an own NXevent_data_em instance
        self.event_data_written = False
        self.event_data_em_id = 1
        self.image_id = 1
        self.spectrum_id = 1
        self.proj_file_names = []
        # assure that there is exactly one *.nsproj file only to parse from
        self.ndata_file_dict = {}
        # just get the *.ndata files irrespective whether they will be parsed later
        self.hdf_file_dict = {}
        # just get the *.h5 files irrespective whether they will be interpreted

    def check_project_file(self):
        """Inspect the content of the compressed project file to check if supported.

        Populates proj_file_names, ndata_file_dict, and hdf_file_dict as a
        side effect; keys are the archive member base names without suffix.
        Returns True only if the key sets are disjoint and exactly one
        *.nsproj file exists.
        """
        # file_name = "2022-02-18_Metadata_Kuehbach.zip.nionswift"
        with ZipFile(self.file_name) as zip_file_hdl:
            for file in zip_file_hdl.namelist():
                if file.endswith(".h5"):
                    # strip directory prefix and suffix to get the lookup key
                    key = file[file.rfind("/") + 1:].replace(".h5", "")
                    if key not in self.hdf_file_dict:
                        self.hdf_file_dict[key] = file
                elif file.endswith(".ndata"):
                    key = file[file.rfind("/") + 1:].replace(".ndata", "")
                    if key not in self.ndata_file_dict:
                        self.ndata_file_dict[key] = file
                elif file.endswith(".nsproj"):
                    self.proj_file_names.append(file)
                else:
                    continue
        # a display item must be backed by either an ndata or an h5 file,
        # never both, hence the disjointness requirement
        if not self.ndata_file_dict.keys().isdisjoint(self.hdf_file_dict.keys()):
            print("Keys of *.ndata and *.h5 files in project are not disjoint!")
            return False
        if len(self.proj_file_names) != 1:
            print("The project contains either no or more than one nsproj file!")
            return False
        print(self.proj_file_names)
        for key, val in self.ndata_file_dict.items():
            print(f"{key}, {val}")
        for key, val in self.hdf_file_dict.items():
            print(f"{key}, {val}")
        return True
    def add_nx_image_real_space(self, meta, arr, template):
        """Add data and metadata for an instance of concept NxImageRealSpace.

        meta: flattened display_item metadata (flatdict-like).
        arr: numpy array with the image stack; assumed 2d (y, x) or
             3d (image_identifier, y, x) — TODO confirm against callers.
        template: dataconverter template dict, written in place.
        """
        # metadata: resolve each variadic NeXus path against the current
        # (entry, event, image) instance identifiers
        identifier = [self.entry_id, self.event_data_em_id, self.image_id]
        for nx_path, modifier in NxImageRealSpaceDict.items():
            if (nx_path != "IGNORE") and (nx_path != "UNCLEAR"):
                # print(nx_path)
                # instance_identifier = list(np.repeat(1, nx_path.count("*")))
                # print(instance_identifier)
                trg = variadic_path_to_specific_path(nx_path, identifier)
                # print(trg)
                template[trg] = apply_modifier(modifier, meta)

        # array data
        axes_lst = get_list_of_dimension_scale_axes(meta)
        # print(axes_lst)

        # (template axis name, human-readable axis name, dimension index)
        axes_names = [("axis_image_identifier", "image_identifier", 2),
                      ("axis_y", "y", 1),
                      ("axis_x", "x", 0)]
        print(f"Add NXdata len(axes_lst) {len(axes_lst)}, len(axes_names) {len(axes_names)}")
        if 2 <= len(axes_lst) <= len(axes_names):
            trg = f"/ENTRY[entry{self.entry_id}]/measurement/EVENT_DATA_EM[event_data_em" \
                  f"{self.event_data_em_id}]/IMAGE_SET[image_set{self.image_id}]/" \
                  f"PROCESS[process]"
            template[f"{trg}/source"] = "n/a"
            template[f"{trg}/source/@version"] = "n/a"
            template[f"{trg}/PROGRAM[program1]/program"] \
                = f"We do not know because the nsproj file does not store it explicitly "\
                  f"which nionswift version and dependencies are used when writing "\
                  f"the nsproj file!"
            template[f"{trg}/PROGRAM[program1]/program/@version"] = "not recoverable"
            template[f"{trg}/PROGRAM[program2]/program"] \
                = f"{NX_EM_NION_SWIFT_NAME}"
            template[f"{trg}/PROGRAM[program2]/program/@version"] \
                = f"{NX_EM_NION_SWIFT_VERSION}"
            template[f"{trg}/PROGRAM[program3]/program"] \
                = f"{NX_EM_NION_EXEC_NAME}"
            template[f"{trg}/PROGRAM[program3]/program/@version"] \
                = f"{NX_EM_NION_EXEC_VERSION}"

            trg = f"/ENTRY[entry{self.entry_id}]/measurement/EVENT_DATA_EM[event_data_em" \
                  f"{self.event_data_em_id}]/IMAGE_SET[image_set{self.image_id}]/DATA[stack]"
            template[f"{trg}/@NX_class"] = "NXdata"  # ##TODO one should not need to add this manually
            template[f"{trg}/title"] = str("Should come from NionSwift directly")
            template[f"{trg}/@signal"] = "data_counts"
            template[f"{trg}/@axes"] = ["axis_image_identifier", "axis_y", "axis_x"]
            for idx in np.arange(0, 3):
                template[f"{trg}/@AXISNAME_indices[{axes_names[idx][0]}_indices]"] \
                    = np.uint32(axes_names[idx][2])
            # the following three lines would be required by H5Web to plot RGB maps
            # template[f"{trg}/@CLASS"] = "IMAGE"
            # template[f"{trg}/@IMAGE_VERSION"] = "1.2"
            # template[f"{trg}/@SUBCLASS_VERSION"] = np.int64(15)

            if len(axes_lst) == 2:
                # 2d image: promote to a stack of one image so that the
                # template layout is uniform with the 3d case
                ny, nx = np.shape(arr)
                template[f"{trg}/data_counts"] \
                    = {"compress": np.reshape(arr, (1, ny, nx), order="C"), "strength": 1}
                template[f"{trg}/data_counts/@long_name"] = "Signal"
                # no image_identifier axis available
                template[f"{trg}/AXISNAME[{axes_names[0][0]}]"] \
                    = {"compress": np.asarray([1], np.uint32), "strength": 1}
                template[f"{trg}/AXISNAME[{axes_names[0][0]}]/@long_name"] \
                    = f"Image identifier (a. u.)"
                template[f"{trg}/AXISNAME[{axes_names[0][0]}]/@units"] = ""
                for idx in [1, 2]:
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]"] \
                        = {"compress": axes_lst[idx - 1]["value"], "strength": 1}
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]/@long_name"] \
                        = f"Calibrated position along {axes_names[idx][1]}-axis " \
                          f"({axes_lst[idx - 1]['unit']})"
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]/@units"] \
                        = f"{axes_lst[idx - 1]['unit']}"
            else:  # len(axes_lst) == 3
                template[f"{trg}/data_counts"] = {"compress": arr, "strength": 1}
                for idx in [0, 1, 2]:
                    # TODO check that casting works properly
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]"] \
                        = {"compress": np.asarray(axes_lst[idx]["value"], np.uint32),
                           "strength": 1}
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]/@long_name"] \
                        = f"Calibrated position along {axes_names[idx][1]}-axis " \
                          f"({axes_lst[idx]['unit']})"
                    template[f"{trg}/AXISNAME[{axes_names[idx][0]}]/@units"] \
                        = f"{axes_lst[idx]['unit']}"

        self.image_id += 1
        self.event_data_written = True
        return template

    def update_event_identifier(self):
        """Advance and reset bookkeeping of event data em and data instances."""
        if self.event_data_written is True:
            self.event_data_em_id += 1
            self.event_data_written = False
        self.image_id = 1
        self.spectrum_id = 1
        # because either we found that the display item is fed from an H5 or from an NDATA
        # print(f"Identifier at {self.entry_id}, {self.event_data_em_id}, {self.image_id}, {self.spectrum_id}")

    def map_to_nexus(self, meta, arr, concept_name, template):
        """Create the actual instance of a specific set of NeXus concepts in template.

        meta is an flatdict
        arr is a numpy array
        Only NxImageSetRealSpace is implemented so far; other concepts are
        logged and skipped.
        """
        if concept_name == "NxImageSetRealSpace":
            print(f"Adding an instance of concept {concept_name}")
            self.add_nx_image_real_space(meta, arr, template)
        else:
            print(f"Ignoring concept {concept_name} because not yet implemented")

        self.update_event_identifier()
        return template
    def process_ndata(self, file_hdl, full_path, template):
        """Handle reading and processing of opened *.ndata inside the ZIP file.

        An *.ndata file is itself a zip-like container holding a
        metadata.json and a data.npy member; both are located via
        nionswift's NDataHandler offset tables.
        """
        # assure that we start reading that file_hdl/pointer from the beginning...
        file_hdl.seek(0)
        local_files, dir_files, eocd = nsnd.parse_zip(file_hdl)
        # ...now that pointer might point somewhere...
        flat_metadata_dict = {}
        data_arr = None
        nx_concept_name = ""

        for offset, tpl in local_files.items():
            # print(f"{tpl}")
            if tpl[0] == b'metadata.json':
                print(f"Extract metadata.json from {full_path} at offset {offset}")
                # ... explicit jump back to beginning of the file
                file_hdl.seek(0)
                metadata_dict = nsnd.read_json(file_hdl,
                                               local_files,
                                               dir_files,
                                               b'metadata.json')

                nx_concept_key = identify_nexus_concept_key(metadata_dict)
                nx_concept_name = nexus_concept_dict[nx_concept_key]
                print(f"Display_item {full_path}, concept {nx_concept_key}, maps {nx_concept_name}")

                flat_metadata_dict = fd.FlatDict(metadata_dict, delimiter='/')
                break
            # because we expect (based on Benedikt's example) to find only one json file
            # in that *.ndata file pointed to by file_hdl

        if flat_metadata_dict == {}:  # only continue if some metadata were retrieved
            return template

        for offset, tpl in local_files.items():
            # print(f"{tpl}")
            if tpl[0] == b'data.npy':
                print(f"Extract data.npy from {full_path} at offset {offset}")
                file_hdl.seek(0)
                data_arr = nsnd.read_data(file_hdl,
                                          local_files,
                                          dir_files,
                                          b'data.npy')
                break
            # because we expect (based on Benedikt's example) to find only one npy file
            # in that *.ndata file pointed to by file_hdl

        print(f"data_arr type {data_arr.dtype}, shape {np.shape(data_arr)}")
        # check on the integriety of the data_arr array that it is not None or empty
        # this should be done more elegantly by just writing the
        # data directly into the template and not creating another copy

        self.map_to_nexus(flat_metadata_dict, data_arr, nx_concept_name, template)
        del flat_metadata_dict
        del data_arr
        del nx_concept_name
        return template

    def process_hdf(self, file_hdl, full_path, template):
        """Handle reading and processing of opened *.h5 inside the ZIP file.

        The display_item metadata live as a JSON string in the "properties"
        attribute of the "data" dataset; the array is the dataset itself.
        """
        flat_metadata_dict = {}
        data_arr = None
        nx_concept_name = ""

        file_hdl.seek(0)
        h5r = h5py.File(file_hdl, "r")
        metadata_dict = json.loads(h5r["data"].attrs["properties"])

        nx_concept_key = identify_nexus_concept_key(metadata_dict)
        nx_concept_name = nexus_concept_dict[nx_concept_key]
        print(f"Display_item {full_path}, concept {nx_concept_key}, maps {nx_concept_name}")

        flat_metadata_dict = fd.FlatDict(metadata_dict, delimiter='/')

        if flat_metadata_dict == {}:  # only continue if some metadata were retrieved
            return template

        data_arr = h5r["data"][()]
        h5r.close()

        print(f"data_arr type {data_arr.dtype}, shape {np.shape(data_arr)}")
        # check on the integriety of the data_arr array that it is not None or empty
        # this should be done more elegantly by just writing the
        # data directly into the template and not creating another copy
        self.map_to_nexus(flat_metadata_dict, data_arr, nx_concept_name, template)
        del flat_metadata_dict
        del data_arr
        del nx_concept_name
        return template

    def parse_project_file(self, template: dict) -> dict:
        """Parse lazily from compressed NionSwift project (nsproj + directory).

        Each display_item's data_item_reference UUID is mapped to its
        backing file name stem; the matching *.ndata or *.h5 member is then
        opened from the zip and processed.
        """
        swift_proj_dict = {}
        with ZipFile(self.file_name) as zip_file_hdl:
            with zip_file_hdl.open(self.proj_file_names[0]) as file_hdl:
                # with open(file_name, 'r') as stream:
                # the *.nsproj file is YAML — flatten it like the ELN dump
                swift_proj_dict = fd.FlatDict(yaml.safe_load(file_hdl), delimiter='/')
                # for entry in swift_proj_dict["display_items"]:
                #     if isinstance(entry, dict):
                #         for key, val in entry.items():
                #             print(f"{key}, {val}")
        if swift_proj_dict == {}:
            return template

        for itm in swift_proj_dict["display_items"]:
            if set(["type", "uuid", "created", "display_data_channels"]).issubset(itm.keys()):
                # only single-channel display items are supported here
                if len(itm["display_data_channels"]) == 1:
                    if "data_item_reference" in itm["display_data_channels"][0].keys():
                        key = uuid_to_file_name(
                            itm["display_data_channels"][0]["data_item_reference"])
                        # file_name without the mime type
                        if key in self.ndata_file_dict:
                            print(f"Key {key} is *.ndata maps to {self.ndata_file_dict[key]}")
                            with ZipFile(self.file_name) as zip_file_hdl:
                                print(f"Parsing {self.ndata_file_dict[key]}...")
                                with zip_file_hdl.open(self.ndata_file_dict[key]) as file_hdl:
                                    self.process_ndata(
                                        file_hdl,
                                        self.ndata_file_dict[key],
                                        template)
                        elif key in self.hdf_file_dict:
                            print(f"Key {key} is *.h5 maps to {self.hdf_file_dict[key]}")
                            with ZipFile(self.file_name) as zip_file_hdl:
                                print(f"Parsing {self.hdf_file_dict[key]}...")
                                with zip_file_hdl.open(self.hdf_file_dict[key]) as file_hdl:
                                    self.process_hdf(
                                        file_hdl,
                                        self.hdf_file_dict[key],
                                        template)
                        else:
                            print(f"Key {key} has no corresponding data file")
        return template

    def parse(self, template: dict) -> dict:
        """Parse NOMAD OASIS relevant data and metadata from swift project."""
        print("Parsing lazily from compressed NionSwift project (nsproj + directory)...")
        print(self.file_name)
        print(f"{self.entry_id}")
        # bail out early when the archive layout is unsupported
        if self.check_project_file() is False:
            return template

        self.parse_project_file(template)
        return template
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +"""Utility class handling which community/technology partner files for em_nion reader.""" + +# pylint: disable=no-member,duplicate-code,too-few-public-methods + +from typing import Tuple, Dict, List + + +class EmNionUseCaseSelector: + """Decision maker about what needs to be parsed given arbitrary input. + + Users might invoke this dataconverter with arbitrary input, no input, or + too much input. The UseCaseSelector decides what to do in each case. + """ + + def __init__(self, file_paths: Tuple[str] = None): + """Analyze with which parser(s) (if any) input can be handled.""" + self.mime_types: Dict[str, list] = {} + self.eln_parser_type: str = "none" + self.eln: List[str] = [] + self.prj_parser_type: str = "none" + self.prj: List[str] = [] + self.is_valid = False + + self.analyze_mime_types(file_paths) + self.identify_case() + + def analyze_mime_types(self, file_paths: Tuple[str] = None): + """Accept/reject and organize input-files based on mime-type.""" + self.supported_mime_types = ["yaml", "yml", "nionswift"] + for mime_type in self.supported_mime_types: + self.mime_types[mime_type] = [] + for file_name in file_paths: + index = file_name.lower().rfind(".") + if index >= 0: + suffix = file_name.lower()[index + 1::] + add = (suffix in self.supported_mime_types) \ + and (file_name not in self.mime_types[suffix]) + if add is True: + self.mime_types[suffix].append(file_name) + print(self.mime_types) + + def identify_case(self): + """Identify which sub-parsers to use if any based on input mime_types.""" + # eln + self.is_valid = False + for mime_type in 
["yaml", "yml"]: + self.eln += self.mime_types[mime_type] + if len(self.eln) == 1: + self.eln_parser_type = "generic" + self.is_valid = True + else: + self.eln_parser_type = "none" + + if "nionswift" in self.mime_types: + if len(self.mime_types["nionswift"]) == 1: + self.prj += self.mime_types["nionswift"] + self.prj_parser_type = "nionswift" + else: + self.is_valid = False + + print("Input suggests to use the following sub-parsers:") + print(f"ELN parser: {self.eln_parser_type}") + print(self.eln) + print(f"Data parser: {self.prj_parser_type}") + print(self.prj) + print(f"Input suggests that parsing is valid: {self.is_valid}") diff --git a/pynxtools/dataconverter/readers/em_nion/utils/versioning.py b/pynxtools/dataconverter/readers/em_nion/utils/versioning.py new file mode 100644 index 000000000..fa65ae0f3 --- /dev/null +++ b/pynxtools/dataconverter/readers/em_nion/utils/versioning.py @@ -0,0 +1,34 @@ +# +# Copyright The NOMAD Authors. +# +# This file is part of NOMAD. See https://nomad-lab.eu for further info. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +"""Utility tool constants and versioning.""" + +# pylint: disable=no-member + +from pynxtools.dataconverter.readers.shared.shared_utils \ + import get_repo_last_commit + + +NX_EM_NION_ADEF_NAME = "NXem" +NX_EM_NION_ADEF_VERSION = "nexus-fairmat-proposal successor of " \ + "9636feecb79bb32b828b1a9804269573256d7696" +# based on https://fairmat-experimental.github.io/nexus-fairmat-proposal +NX_EM_NION_EXEC_NAME = "dataconverter/reader/em_nion/reader.py" +NX_EM_NION_EXEC_VERSION = get_repo_last_commit() + +NX_EM_NION_SWIFT_NAME = "nionswift" +NX_EM_NION_SWIFT_VERSION = "0.16.8" diff --git a/pynxtools/dataconverter/readers/em_om/utils/generic_eln_io.py b/pynxtools/dataconverter/readers/em_om/utils/generic_eln_io.py index bad9e1294..4a97ecb60 100644 --- a/pynxtools/dataconverter/readers/em_om/utils/generic_eln_io.py +++ b/pynxtools/dataconverter/readers/em_om/utils/generic_eln_io.py @@ -51,23 +51,21 @@ class NxEmOmGenericElnSchemaParser: def __init__(self, file_name: str, entry_id: int, pattern_simulation: bool): """Fill template with ELN pieces of information.""" - self.file_name = file_name - if file_name != "": - self.file_name = file_name - else: - self.file_name = "" - if entry_id > 0: + self.pattern_simulation = pattern_simulation + print(f"Extracting data from ELN file: {file_name}") + if (file_name.rsplit('/', 1)[-1].startswith("eln_data") + or file_name.startswith("eln_data")) and entry_id > 0: self.entry_id = entry_id + self.file_name = file_name + with open(self.file_name, "r", encoding="utf-8") as stream: + self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter=":") else: self.entry_id = 1 - self.pattern_simulation = pattern_simulation + self.file_name = "" + self.yml = {} - print(self.file_name) - self.yml = None - with open(self.file_name, "r", encoding="utf-8") as stream: - self.yml = fd.FlatDict(yaml.safe_load(stream), delimiter=":") - if "ElectronBackscatterDiffraction" in self.yml: - self.yml = self.yml["ElectronBackscatterDiffraction"] + # if 
"ElectronBackscatterDiffraction" in self.yml: + # self.yml = self.yml["ElectronBackscatterDiffraction"] def parse(self, template: dict) -> dict: """Extract metadata from generic ELN text file to respective NeXus objects.""" @@ -278,7 +276,7 @@ def parse_detector_frame_section(self, template: dict) -> dict: def parse_gnomonic_projection_section(self, template: dict) -> dict: """Parse for the gnomonic projection.""" axes_names = ["x", "y", "z"] - src = "gnomonic_projection_reference_frame" + src = "gnomonic_projection:gnomonic_projection_reference_frame" trg = f"/ENTRY[entry{self.entry_id}]/conventions" \ f"/gnomonic_projection_reference_frame/" if "reference_frame_type" in self.yml[src].keys(): @@ -298,7 +296,7 @@ def parse_gnomonic_projection_section(self, template: dict) -> dict: if "origin" in self.yml[src].keys(): template[f"{trg}origin"] = self.yml[f"{src}:origin"] - src = "pattern_centre" + src = "gnomonic_projection:pattern_centre" trg = f"/ENTRY[entry{self.entry_id}]/conventions/pattern_centre/" axes_names = ["x", "y"] field_names = ["axis_boundary_convention", "axis_normalization_direction"] diff --git a/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_generic_eln_io.py b/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_generic_eln_io.py index 08c745d1d..cdea0cfee 100644 --- a/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_generic_eln_io.py +++ b/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_generic_eln_io.py @@ -50,12 +50,15 @@ class NxEmNomadOasisElnSchemaParser: """ def __init__(self, file_name: str, entry_id: int): - self.entry_id = entry_id - if file_name.startswith("eln_data") and entry_id > 0: + print(f"Extracting data from ELN file: {file_name}") + if (file_name.rsplit('/', 1)[-1].startswith("eln_data") + or file_name.startswith("eln_data")) and entry_id > 0: + self.entry_id = entry_id self.file_name = file_name with open(self.file_name, "r", encoding="utf-8") as stream: self.yml = fd.FlatDict(yaml.safe_load(stream), 
delimiter=":") else: + self.entry_id = 1 self.file_name = "" self.yml = {} diff --git a/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_nexus_plots.py b/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_nexus_plots.py index f38d61e40..1e51a6e2a 100644 --- a/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_nexus_plots.py +++ b/pynxtools/dataconverter/readers/em_spctrscpy/utils/em_nexus_plots.py @@ -139,12 +139,12 @@ def image_plot_available(template: dict, entry_id: int) -> bool: """Choose a preferred NXdata/data instance for generic image.""" entry_name = f"entry{entry_id}" trg = f"/ENTRY[{entry_name}]/measurement/EVENT_DATA_EM[event_data_em1]/" \ - f"IMAGE_SET_EM[image_set_em1]/" + f"IMAGE_SET[image_set1]/" path = "" - if f"{trg}stack/DATA[data_counts]" in template.keys(): + if f"{trg}DATA[stack]/data_counts" in template.keys(): assert isinstance( - template[f"{trg}stack/DATA[data_counts]"]["compress"], np.ndarray), \ + template[f"{trg}DATA[stack]/data_counts"]["compress"], np.ndarray), \ "Generic image data stack not existent!" 
path = "stack" @@ -157,8 +157,8 @@ def image_plot_available(template: dict, entry_id: int) -> bool: trg += "measurement/" template[f"{trg}@default"] = "event_data_em1" trg += "EVENT_DATA_EM[event_data_em1]/" - template[f"{trg}@default"] = "image_set_em1" - trg += "IMAGE_SET_EM[image_set_em1]/" + template[f"{trg}@default"] = "image_set1" + trg += "IMAGE_SET[image_set1]/" template[f"{trg}@default"] = path return True diff --git a/pynxtools/definitions b/pynxtools/definitions index 7bf501ff6..970e55d63 160000 --- a/pynxtools/definitions +++ b/pynxtools/definitions @@ -1 +1 @@ -Subproject commit 7bf501ff611bc49a66609b2cb3e1f3f2b51ae098 +Subproject commit 970e55d6381a6418eac3045a2df034eaf8b8b5b3 diff --git a/pyproject.toml b/pyproject.toml index 4774e5710..b2e665da5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,8 @@ dependencies = [ "kikuchipy>=0.8.2", "pyxem>=0.14.2", "zipfile37==0.1.3", + "nionswift==0.16.8", + "tzlocal<=4.3", "scipy>=1.7.1", "lark>=1.1.5", "requests", diff --git a/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml b/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml index a7ab9fccf..a750d3a80 100644 --- a/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml +++ b/tests/data/dataconverter/readers/apm/nomad_oasis_eln_schema_for_nx_apm/nxapm.schema.archive.yaml @@ -3,16 +3,12 @@ # quantity in NXapm definitions: name: 'apm' - # 'ELN/application definition schema for atom probe microscopy (APM) experiments.' 
- sections: # section definitions what were back in the old days msection base classes - # Operator: - # Specimen: + # 'ELN/matching NXapm NeXus appdef data schema for atom probe' + sections: # section definitions what were back in the old days msections AtomProbeMicroscopy: # the actual schema - # nomad.datamodel.metainfo.eln.NexusParser base_sections: - - 'nomad.datamodel.metainfo.eln.NexusParser' + - 'nomad.datamodel.metainfo.eln.NexusDataConverter' - 'nomad.datamodel.data.EntryData' - # base_section: nomad.datamodel.data.EntryData m_annotations: # Here you can set your default values for the reader and nxdl. template: @@ -300,15 +296,24 @@ definitions: m_annotations: eln: quantities: + status: + type: + type_kind: Enum + type_data: + - success + - failure + description: | + A statement whether the measurement was + successful or failed prematurely. + m_annotations: + eln: + component: RadioEnumEditQuantity instrument_name: type: str description: Given name of the atom probe at the hosting institution. m_annotations: eln: component: StringEditQuantity - location: - type: str - description: Location of the lab or place where the instrument is installed. Using GEOREF is preferred. # (NXfabrication): flight_path_length: type: np.float64 @@ -322,13 +327,6 @@ definitions: defaultDisplayUnit: meter minValue: 0.0 maxValue: 10.0 - field_of_view(NX_FLOAT): - type: np.float64 - unit: meter - description: | - The nominal diameter of the specimen ROI which is measured in the - experiment. Physically, the specimen cannot be measured completely - because ions may launch but not become detected or hit elsewhere. fabrication_vendor: type: str description: Name of the manufacturer/company, i.e. AMETEK/Cameca. 
diff --git a/tests/data/dataconverter/readers/em_nion/README.md b/tests/data/dataconverter/readers/em_nion/README.md new file mode 100644 index 000000000..d554fa343 --- /dev/null +++ b/tests/data/dataconverter/readers/em_nion/README.md @@ -0,0 +1,2 @@ +This is a place for storing data for tests which verify the functionality of the em_spctrscpy reader. +An example for how to use the em_spctrscpy reader is available in _pynxtools/examples/em_. diff --git a/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/eln_data_em.yaml b/tests/data/dataconverter/readers/em_nion/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml similarity index 96% rename from tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/eln_data_em.yaml rename to tests/data/dataconverter/readers/em_nion/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml index 73d2933fa..71ed0ebcf 100644 --- a/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/eln_data_em.yaml +++ b/tests/data/dataconverter/readers/em_nion/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml @@ -5,18 +5,14 @@ definitions: name: 'em' # 'ELN/application definition schema for electron microscopy (EM) experiments.' sections: # section definitions what were back in the old days msection base classes - # Operator: - # Specimen: ElectronMicroscopy: # the actual schema - # nomad.datamodel.metainfo.eln.NexusParser base_sections: - - 'nomad.datamodel.metainfo.eln.NexusParser' + - 'nomad.datamodel.metainfo.eln.NexusDataConverter' - 'nomad.datamodel.data.EntryData' - # base_section: nomad.datamodel.data.EntryData m_annotations: # Here you can set your default values for the reader and nxdl. template: - reader: em_spctrscpy + reader: em_nion nxdl: NXem.nxdl # Listing quantities in the hide component will not show them in the ELN. # This would be useful to make the default values set in `template` fixed. 
@@ -37,7 +33,7 @@ definitions: type: type_kind: Enum type_data: - - 'nexus-fairmat-proposal successor of 50433d9039b3f33299bab338998acb5335cd8951' + - 'nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696' description: Hashvalue of the NeXus application definition file m_annotations: eln: @@ -333,7 +329,7 @@ definitions: eln: # quantities: sub_sections: - electron_gun: + electron_source: section: description: The source which creates the electron beam m_annotations: @@ -354,17 +350,16 @@ definitions: m_annotations: eln: component: NumberEditQuantity - defaultDisplayUnit: kilovolt + defaultDisplayUnit: kV minValue: 0.0 maxValue: 1.1e6 emitter_type: type: type_kind: Enum type_data: - - filament + - thermionic - schottky - - cold_cathode_field_emitter - - other + - field_emission description: | Emitter type used to create the beam. m_annotations: @@ -481,9 +476,9 @@ definitions: m_annotations: eln: quantities: - type: + local_name: type: str - description: Free text option to write further details about the detector. + description: Instrument-specific alias/name m_annotations: eln: component: StringEditQuantity diff --git a/tests/data/dataconverter/readers/em_om/nomad_oasis_eln_schema_for_nx_em_ebsd/nxem_ebsd.schema.archive.yaml b/tests/data/dataconverter/readers/em_om/nomad_oasis_eln_schema_for_nx_em_ebsd/nxem_ebsd.schema.archive.yaml index 9c1d5ca0d..449567df4 100644 --- a/tests/data/dataconverter/readers/em_om/nomad_oasis_eln_schema_for_nx_em_ebsd/nxem_ebsd.schema.archive.yaml +++ b/tests/data/dataconverter/readers/em_om/nomad_oasis_eln_schema_for_nx_em_ebsd/nxem_ebsd.schema.archive.yaml @@ -4,19 +4,15 @@ definitions: name: 'em_om' # 'ELN for collecting conventions relevant for interpreting orientation microscopy data.' 
sections: # section definitions what were back in the old days msection base classes - # Operator: - # Specimen: - ElectronBackscatterDiffraction: # the actual schema - # nomad.datamodel.metainfo.eln.NexusParser + ElectronBackscatterDiffraction: # the actual schema´ base_sections: - - 'nomad.datamodel.metainfo.eln.NexusParser' + - 'nomad.datamodel.metainfo.eln.NexusDataConverter' - 'nomad.datamodel.data.EntryData' - # base_section: nomad.datamodel.data.EntryData m_annotations: # Here you can set your default values for the reader and nxdl. template: reader: em_om - nxdl: NXem.nxdl + nxdl: NXem_ebsd.nxdl # Listing quantities in the hide component will not show them in the ELN. # This would be useful to make the default values set in `template` fixed. # Leave the hide key even if you want to pass an empty list like in this example. @@ -36,8 +32,7 @@ definitions: type: type_kind: Enum type_data: - - NXem_ebsd - default: NXem_ebsd + - 'nexus-fairmat-proposal successor of 9636feecb79bb32b828b1a9804269573256d7696' description: Hashvalue of the NeXus application definition file m_annotations: eln: @@ -47,7 +42,6 @@ definitions: type_kind: Enum type_data: - NXem_ebsd - default: NXem_ebsd description: NeXus NXDL schema to which this file conforms m_annotations: eln: @@ -563,14 +557,14 @@ definitions: type_kind: Enum type_data: - undefined - - front, top, left - - front, top, right - - front, bottom, right - - front, bottom, left - - back, top, left - - back, top, right - - back, bottom, right - - back, bottom, left + - front_top_left + - front_top_right + - front_bottom_right + - front_bottom_left + - back_top_left + - back_top_right + - back_bottom_right + - back_bottom_left default: undefined description: | Location of the origin of the sample surface reference frame. 
@@ -694,14 +688,14 @@ definitions: type_kind: Enum type_data: - undefined - - front, top, left - - front, top, right - - front, bottom, right - - front, bottom, left - - back, top, left - - back, top, right - - back, bottom, right - - back, bottom, left + - front_top_left + - front_top_right + - front_bottom_right + - front_bottom_left + - back_top_left + - back_top_right + - back_bottom_right + - back_bottom_left default: undefined description: | Location of the origin of the sample surface reference frame. @@ -813,14 +807,14 @@ definitions: type_kind: Enum type_data: - undefined - - front, top, left - - front, top, right - - front, bottom, right - - front, bottom, left - - back, top, left - - back, top, right - - back, bottom, right - - back, bottom, left + - front_top_left + - front_top_right + - front_bottom_right + - front_bottom_left + - back_top_left + - back_top_right + - back_bottom_right + - back_bottom_left default: undefined description: | Where is the origin of the detector space reference diff --git a/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml b/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml index 0d9c70bb3..5fc3a2fe4 100644 --- a/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml +++ b/tests/data/dataconverter/readers/em_spctrscpy/nomad_oasis_eln_schema_for_nx_em/nxem.schema.archive.yaml @@ -10,7 +10,7 @@ definitions: ElectronMicroscopy: # the actual schema # nomad.datamodel.metainfo.eln.NexusParser base_sections: - - 'nomad.datamodel.metainfo.eln.NexusParser' + - 'nomad.datamodel.metainfo.eln.NexusDataConverter' - 'nomad.datamodel.data.EntryData' # base_section: nomad.datamodel.data.EntryData m_annotations: @@ -333,7 +333,7 @@ definitions: eln: # quantities: sub_sections: - electron_gun: + electron_source: section: description: The source which creates the electron beam 
m_annotations: @@ -356,15 +356,14 @@ definitions: component: NumberEditQuantity defaultDisplayUnit: kV minValue: 0.0 - maxValue: 1000.0 + maxValue: 1.1e6 emitter_type: type: type_kind: Enum type_data: - - filament + - thermionic - schottky - - cold_cathode_field_emitter - - other + - field_emission description: | Emitter type used to create the beam. m_annotations: diff --git a/tests/dataconverter/test_helpers.py b/tests/dataconverter/test_helpers.py index 73dd7471d..540cf07bb 100644 --- a/tests/dataconverter/test_helpers.py +++ b/tests/dataconverter/test_helpers.py @@ -97,8 +97,8 @@ def fixture_filled_test_data(template, tmp_path): f"/../" f"data/dataconverter/" f"readers/mpes/" - f"xarray_saved_small_cali" - "bration.h5", tmp_path) + f"xarray_saved_small_calibration.h5", + tmp_path) template.clear() template["optional"]["/ENTRY[my_entry]/NXODD_name/float_value"] = 2.0 diff --git a/tests/dataconverter/test_readers.py b/tests/dataconverter/test_readers.py index 659682c48..3d2c86efd 100644 --- a/tests/dataconverter/test_readers.py +++ b/tests/dataconverter/test_readers.py @@ -53,7 +53,7 @@ def get_all_readers() -> List[ParameterSet]: # Explicitly removing ApmReader and EmNionReader because we need to add test data for reader in [get_reader(x) for x in get_names_of_all_readers()]: - if reader.__name__ in ("ApmReader", "EmOmReader", "EmSpctrscpyReader"): + if reader.__name__ in ("ApmReader", "EmOmReader", "EmSpctrscpyReader", "EmNionReader"): readers.append(pytest.param(reader, marks=pytest.mark.skip(reason="Missing test data.") ))