From 9c56a3ec7036c1f753266ccd59e23e44e1d0b2ee Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 17 Dec 2025 09:11:17 +0100 Subject: [PATCH 1/4] Properly unpack 0D data when reading an IDS from a netCDF file 0D IDS Data is expected in the native python data types (int, float, complex, string). Before this fix, 0D numerical data would be stored as numpy.int32, numpy.float64 or numpy.complex128 when reading from a netCDF file. Fixes #89 --- imas/backends/netcdf/nc2ids.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 1b1dbfe8..564d5210 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -157,6 +157,8 @@ def run(self, lazy: bool) -> None: for index, node in indexed_tree_iter(self.ids, target_metadata): value = data[index] if value != getattr(var, "_FillValue", None): + if isinstance(value, np.generic): + value = value.item() # NOTE: bypassing IDSPrimitive.value.setter logic node._IDSPrimitive__value = value @@ -166,10 +168,16 @@ def run(self, lazy: bool) -> None: # here, we'll let IDSPrimitive.value.setter take care of it: self.ids[target_metadata.path].value = data - else: + # We need to unpack 0D ints, floats and complex numbers. 
For better + # performance this check is done outside the for-loop: + elif metadata.ndim or metadata.data_type is IDSDataType.STR: for index, node in indexed_tree_iter(self.ids, target_metadata): # NOTE: bypassing IDSPrimitive.value.setter logic node._IDSPrimitive__value = data[index] + else: + for index, node in indexed_tree_iter(self.ids, target_metadata): + # NOTE: bypassing IDSPrimitive.value.setter logic + node._IDSPrimitive__value = data[index].item() # Unpack 0D value def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" @@ -365,7 +373,7 @@ def get_child(self, child): value = var[self.index] if value is not None: - if isinstance(value, np.ndarray): + if isinstance(value, (np.ndarray, np.generic)): if value.ndim == 0: # Unpack 0D numpy arrays: value = value.item() else: From 28a35f79ffe76dff347f4696b6615459fa78e2ca Mon Sep 17 00:00:00 2001 From: Prasad Date: Thu, 8 Jan 2026 17:51:27 +0100 Subject: [PATCH 2/4] Cleanup unit tests: remove checks for imas-core (#92) --- conftest.py | 34 +------ imas/backends/imas_core/db_entry_al.py | 13 +-- imas/backends/imas_core/imas_interface.py | 31 ++---- imas/exception.py | 6 +- imas/ids_defs.py | 114 +++++++--------------- imas/ids_toplevel.py | 8 -- imas/test/test_cli.py | 4 +- imas/test/test_dbentry.py | 6 +- imas/test/test_exception.py | 2 +- imas/test/test_ids_ascii_data.py | 2 +- imas/test/test_ids_toplevel.py | 2 +- imas/test/test_ids_validate.py | 2 +- imas/test/test_latest_dd_autofill.py | 4 +- imas/test/test_lazy_loading.py | 10 +- imas/test/test_nbc_change.py | 2 +- imas/test/test_snippets.py | 2 +- imas/test/test_static_ids.py | 2 +- imas/test/test_to_xarray.py | 2 +- imas/test/test_util.py | 6 +- 19 files changed, 73 insertions(+), 179 deletions(-) diff --git a/conftest.py b/conftest.py index 51aaa4d4..2d2cd835 100644 --- a/conftest.py +++ b/conftest.py @@ -9,7 +9,6 @@ import functools import logging import os -import sys from copy import deepcopy 
from pathlib import Path @@ -22,7 +21,6 @@ import pytest from packaging.version import Version -from imas.backends.imas_core.imas_interface import has_imas as _has_imas from imas.backends.imas_core.imas_interface import ll_interface, lowlevel from imas.dd_zip import dd_etree, dd_xml_versions, latest_dd_version from imas.ids_defs import ( @@ -39,17 +37,7 @@ os.environ["IMAS_AL_DISABLE_VALIDATE"] = "1" - -try: - import imas # noqa -except ImportError: - - class SkipOnIMASAccess: - def __getattr__(self, attr): - pytest.skip("This test requires the `imas` HLI, which is not available.") - - # Any test that tries to access an attribute from the `imas` package will be skipped - sys.modules["imas"] = SkipOnIMASAccess() +import imas # noqa def pytest_addoption(parser): @@ -78,7 +66,6 @@ def pytest_addoption(parser): if "not available" in str(iex.message): _BACKENDS.pop("mdsplus") - try: import pytest_xdist except ImportError: @@ -91,28 +78,11 @@ def worker_id(): @pytest.fixture(params=_BACKENDS) def backend(pytestconfig: pytest.Config, request: pytest.FixtureRequest): backends_provided = any(map(pytestconfig.getoption, _BACKENDS)) - if not _has_imas: - if backends_provided: - raise RuntimeError( - "Explicit backends are provided, but IMAS is not available." 
- ) - pytest.skip("No IMAS available, skip tests using a backend") if backends_provided and not pytestconfig.getoption(request.param): pytest.skip(f"Tests for {request.param} backend are skipped.") return _BACKENDS[request.param] -@pytest.fixture() -def has_imas(): - return _has_imas - - -@pytest.fixture() -def requires_imas(): - if not _has_imas: - pytest.skip("No IMAS available") - - def pytest_generate_tests(metafunc): if "ids_name" in metafunc.fixturenames: if metafunc.config.getoption("ids"): @@ -214,7 +184,7 @@ def wrapper(*args, **kwargs): @pytest.fixture -def log_lowlevel_calls(monkeypatch, requires_imas): +def log_lowlevel_calls(monkeypatch): """Debugging fixture to log calls to the imas lowlevel module.""" for al_function in dir(lowlevel): if al_function.startswith("ual_") or al_function.startswith("al"): diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index dad5019b..b9d118dd 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -38,7 +38,7 @@ from .al_context import ALContext, LazyALContext from .db_entry_helpers import delete_children, get_children, put_children -from .imas_interface import LLInterfaceError, has_imas, ll_interface +from .imas_interface import LLInterfaceError, ll_interface from .mdsplus_model import mdsplus_model_dir from .uda_support import extract_idsdef, get_dd_version_from_idsdef_xml @@ -52,14 +52,6 @@ logger = logging.getLogger(__name__) -def require_imas_available(): - if not has_imas: - raise RuntimeError( - "The IMAS Core library is not available. Please install 'imas_core', " - "or load a supported IMAS module if you use an HPC environment." 
- ) - - class ALDBEntryImpl(DBEntryImpl): """DBEntry implementation using imas_core as a backend.""" @@ -86,7 +78,6 @@ def __init__(self, uri: str, mode: int, factory: IDSFactory): @classmethod def from_uri(cls, uri: str, mode: str, factory: IDSFactory) -> "ALDBEntryImpl": - require_imas_available() if mode not in _OPEN_MODES: modes = list(_OPEN_MODES) raise ValueError(f"Unknown mode {mode!r}, was expecting any of {modes}") @@ -105,8 +96,6 @@ def from_pulse_run( options: Any, factory: IDSFactory, ) -> "ALDBEntryImpl": - # Raise an error if imas is not available - require_imas_available() # Set defaults user_name = user_name or getpass.getuser() diff --git a/imas/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py index 8fa3963b..c9d69a02 100644 --- a/imas/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -12,30 +12,17 @@ from packaging.version import Version -logger = logging.getLogger(__name__) +# Import the Access Layer module +# First try to import imas_core, which is available since AL 5.2 +from imas_core import _al_lowlevel as lowlevel +from imas_core import imasdef # noqa: F401 +logger = logging.getLogger(__name__) -# Import the Access Layer module -has_imas = True -try: - # First try to import imas_core, which is available since AL 5.2 - from imas_core import _al_lowlevel as lowlevel - from imas_core import imasdef - - # Enable throwing exceptions from the _al_lowlevel interface - enable_exceptions = getattr(lowlevel, "imas_core_config_enable_exceptions", None) - if enable_exceptions: - enable_exceptions() - -except ImportError as exc: - imas = None - has_imas = False - imasdef = None - lowlevel = None - logger.warning( - "Could not import 'imas_core': %s. 
Some functionality is not available.", - exc, - ) +# Enable throwing exceptions from the _al_lowlevel interface +enable_exceptions = getattr(lowlevel, "imas_core_config_enable_exceptions", None) +if enable_exceptions: + enable_exceptions() class LLInterfaceError(RuntimeError): diff --git a/imas/exception.py b/imas/exception.py index 737680c2..737284d8 100644 --- a/imas/exception.py +++ b/imas/exception.py @@ -20,10 +20,8 @@ # Expose ALException, which may be thrown by the lowlevel -if _imas_interface.has_imas: - ALException = _imas_interface.lowlevel.ALException -else: - ALException = None + +ALException = _imas_interface.lowlevel.ALException class IDSNameError(ValueError): diff --git a/imas/ids_defs.py b/imas/ids_defs.py index af4ed45c..3ac3c6be 100644 --- a/imas/ids_defs.py +++ b/imas/ids_defs.py @@ -86,86 +86,46 @@ Identifier for the default serialization protocol. """ -import functools import logging -from imas.backends.imas_core.imas_interface import has_imas, imasdef +from imas.backends.imas_core.imas_interface import imasdef logger = logging.getLogger(__name__) -if has_imas: - ASCII_BACKEND = imasdef.ASCII_BACKEND - CHAR_DATA = imasdef.CHAR_DATA - CLOSE_PULSE = imasdef.CLOSE_PULSE - CLOSEST_INTERP = imasdef.CLOSEST_INTERP - CREATE_PULSE = imasdef.CREATE_PULSE - DOUBLE_DATA = imasdef.DOUBLE_DATA - COMPLEX_DATA = imasdef.COMPLEX_DATA - EMPTY_COMPLEX = imasdef.EMPTY_COMPLEX - EMPTY_FLOAT = imasdef.EMPTY_FLOAT - EMPTY_INT = imasdef.EMPTY_INT - ERASE_PULSE = imasdef.ERASE_PULSE - FORCE_CREATE_PULSE = imasdef.FORCE_CREATE_PULSE - FORCE_OPEN_PULSE = imasdef.FORCE_OPEN_PULSE - HDF5_BACKEND = imasdef.HDF5_BACKEND - IDS_TIME_MODE_HETEROGENEOUS = imasdef.IDS_TIME_MODE_HETEROGENEOUS - IDS_TIME_MODE_HOMOGENEOUS = imasdef.IDS_TIME_MODE_HOMOGENEOUS - IDS_TIME_MODE_INDEPENDENT = imasdef.IDS_TIME_MODE_INDEPENDENT - IDS_TIME_MODE_UNKNOWN = imasdef.IDS_TIME_MODE_UNKNOWN - IDS_TIME_MODES = imasdef.IDS_TIME_MODES - INTEGER_DATA = imasdef.INTEGER_DATA - LINEAR_INTERP = 
imasdef.LINEAR_INTERP - MDSPLUS_BACKEND = imasdef.MDSPLUS_BACKEND - MEMORY_BACKEND = imasdef.MEMORY_BACKEND - NODE_TYPE_STRUCTURE = imasdef.NODE_TYPE_STRUCTURE - OPEN_PULSE = imasdef.OPEN_PULSE - PREVIOUS_INTERP = imasdef.PREVIOUS_INTERP - READ_OP = imasdef.READ_OP - UDA_BACKEND = imasdef.UDA_BACKEND - UNDEFINED_INTERP = imasdef.UNDEFINED_INTERP - UNDEFINED_TIME = imasdef.UNDEFINED_TIME - WRITE_OP = imasdef.WRITE_OP - ASCII_SERIALIZER_PROTOCOL = getattr(imasdef, "ASCII_SERIALIZER_PROTOCOL", 60) - FLEXBUFFERS_SERIALIZER_PROTOCOL = getattr( - imasdef, "FLEXBUFFERS_SERIALIZER_PROTOCOL", None - ) - DEFAULT_SERIALIZER_PROTOCOL = getattr(imasdef, "DEFAULT_SERIALIZER_PROTOCOL", 60) - -else: - # Preset some constants which are used elsewhere - # this is a bit ugly, perhaps reuse the list of imports from above? - # it seems no problem to use None, since the use of the values should not - # be allowed, they are only used in operations which use the backend, - # which we (should) gate - ASCII_BACKEND = CHAR_DATA = CLOSE_PULSE = CLOSEST_INTERP = DOUBLE_DATA = None - FORCE_OPEN_PULSE = CREATE_PULSE = ERASE_PULSE = None - COMPLEX_DATA = FORCE_CREATE_PULSE = HDF5_BACKEND = None - INTEGER_DATA = LINEAR_INTERP = MDSPLUS_BACKEND = MEMORY_BACKEND = None - NODE_TYPE_STRUCTURE = OPEN_PULSE = PREVIOUS_INTERP = READ_OP = None - UDA_BACKEND = UNDEFINED_INTERP = UNDEFINED_TIME = WRITE_OP = None - # These constants are also useful when not working with the AL - EMPTY_FLOAT = -9e40 - EMPTY_INT = -999_999_999 - EMPTY_COMPLEX = complex(EMPTY_FLOAT, EMPTY_FLOAT) - IDS_TIME_MODE_UNKNOWN = EMPTY_INT - IDS_TIME_MODE_HETEROGENEOUS = 0 - IDS_TIME_MODE_HOMOGENEOUS = 1 - IDS_TIME_MODE_INDEPENDENT = 2 - IDS_TIME_MODES = [0, 1, 2] - ASCII_SERIALIZER_PROTOCOL = 60 - FLEXBUFFERS_SERIALIZER_PROTOCOL = None - DEFAULT_SERIALIZER_PROTOCOL = 60 - - -def needs_imas(func): - if has_imas: - return func - - @functools.wraps(func) - def wrapper(*args, **kwargs): - raise RuntimeError( - f"Function {func.__name__} 
requires IMAS, but IMAS is not available." - ) - - return wrapper +ASCII_BACKEND = imasdef.ASCII_BACKEND +CHAR_DATA = imasdef.CHAR_DATA +CLOSE_PULSE = imasdef.CLOSE_PULSE +CLOSEST_INTERP = imasdef.CLOSEST_INTERP +CREATE_PULSE = imasdef.CREATE_PULSE +DOUBLE_DATA = imasdef.DOUBLE_DATA +COMPLEX_DATA = imasdef.COMPLEX_DATA +EMPTY_COMPLEX = imasdef.EMPTY_COMPLEX +EMPTY_FLOAT = imasdef.EMPTY_FLOAT +EMPTY_INT = imasdef.EMPTY_INT +ERASE_PULSE = imasdef.ERASE_PULSE +FORCE_CREATE_PULSE = imasdef.FORCE_CREATE_PULSE +FORCE_OPEN_PULSE = imasdef.FORCE_OPEN_PULSE +HDF5_BACKEND = imasdef.HDF5_BACKEND +IDS_TIME_MODE_HETEROGENEOUS = imasdef.IDS_TIME_MODE_HETEROGENEOUS +IDS_TIME_MODE_HOMOGENEOUS = imasdef.IDS_TIME_MODE_HOMOGENEOUS +IDS_TIME_MODE_INDEPENDENT = imasdef.IDS_TIME_MODE_INDEPENDENT +IDS_TIME_MODE_UNKNOWN = imasdef.IDS_TIME_MODE_UNKNOWN +IDS_TIME_MODES = imasdef.IDS_TIME_MODES +INTEGER_DATA = imasdef.INTEGER_DATA +LINEAR_INTERP = imasdef.LINEAR_INTERP +MDSPLUS_BACKEND = imasdef.MDSPLUS_BACKEND +MEMORY_BACKEND = imasdef.MEMORY_BACKEND +NODE_TYPE_STRUCTURE = imasdef.NODE_TYPE_STRUCTURE +OPEN_PULSE = imasdef.OPEN_PULSE +PREVIOUS_INTERP = imasdef.PREVIOUS_INTERP +READ_OP = imasdef.READ_OP +UDA_BACKEND = imasdef.UDA_BACKEND +UNDEFINED_INTERP = imasdef.UNDEFINED_INTERP +UNDEFINED_TIME = imasdef.UNDEFINED_TIME +WRITE_OP = imasdef.WRITE_OP +ASCII_SERIALIZER_PROTOCOL = getattr(imasdef, "ASCII_SERIALIZER_PROTOCOL", 60) +FLEXBUFFERS_SERIALIZER_PROTOCOL = getattr( + imasdef, "FLEXBUFFERS_SERIALIZER_PROTOCOL", None +) +DEFAULT_SERIALIZER_PROTOCOL = getattr(imasdef, "DEFAULT_SERIALIZER_PROTOCOL", 60) diff --git a/imas/ids_toplevel.py b/imas/ids_toplevel.py index 947bf72f..fcda5f0d 100644 --- a/imas/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -22,7 +22,6 @@ IDS_TIME_MODE_INDEPENDENT, IDS_TIME_MODE_UNKNOWN, IDS_TIME_MODES, - needs_imas, ) from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata from imas.ids_structure import IDSStructure @@ -99,7 +98,6 @@ def 
default_serializer_protocol(): """Return the default serializer protocol.""" return DEFAULT_SERIALIZER_PROTOCOL - @needs_imas def serialize(self, protocol=None) -> bytes: """Serialize this IDS to a data buffer. @@ -169,7 +167,6 @@ def serialize(self, protocol=None) -> bytes: return bytes(buffer) raise ValueError(f"Unrecognized serialization protocol: {protocol}") - @needs_imas def deserialize(self, data: bytes) -> None: """Deserialize the data buffer into this IDS. @@ -289,7 +286,6 @@ def _validate(self): for child in self.iter_nonempty_(accept_lazy=True): child._validate() - @needs_imas def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None: """Get data from AL backend storage format. @@ -300,7 +296,6 @@ def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None raise NotImplementedError() db_entry.get(self.metadata.name, occurrence, destination=self) - @needs_imas def getSlice( self, time_requested: float, @@ -323,7 +318,6 @@ def getSlice( destination=self, ) - @needs_imas def putSlice( self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None ) -> None: @@ -336,7 +330,6 @@ def putSlice( raise NotImplementedError() db_entry.put_slice(self, occurrence) - @needs_imas def deleteData( self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None ) -> None: @@ -349,7 +342,6 @@ def deleteData( raise NotImplementedError() db_entry.delete_data(self, occurrence) - @needs_imas def put(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None: """Put this IDS to the backend. 
diff --git a/imas/test/test_cli.py b/imas/test/test_cli.py index 0f4b305e..130aa287 100644 --- a/imas/test/test_cli.py +++ b/imas/test/test_cli.py @@ -17,7 +17,7 @@ def test_imas_version(): @pytest.mark.cli -def test_db_analysis(tmp_path, requires_imas): +def test_db_analysis(tmp_path): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: @@ -42,7 +42,7 @@ def test_db_analysis(tmp_path, requires_imas): @pytest.mark.cli -def test_db_analysis_csv(tmp_path, requires_imas): +def test_db_analysis_csv(tmp_path): with DBEntry(f"imas:hdf5?path={tmp_path}/entry1", "w") as entry: eq = entry.factory.equilibrium() eq.ids_properties.homogeneous_time = 2 diff --git a/imas/test/test_dbentry.py b/imas/test/test_dbentry.py index e13d82a4..f014eb9b 100644 --- a/imas/test/test_dbentry.py +++ b/imas/test/test_dbentry.py @@ -6,7 +6,7 @@ from imas.test.test_helpers import compare_children, open_dbentry -def test_dbentry_contextmanager(requires_imas): +def test_dbentry_contextmanager(): entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) entry.create() ids = entry.factory.core_profiles() @@ -22,7 +22,7 @@ def test_dbentry_contextmanager(requires_imas): assert entry2._dbe_impl is None -def test_dbentry_contextmanager_uri(tmp_path, requires_imas): +def test_dbentry_contextmanager_uri(tmp_path): entry = imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 @@ -77,7 +77,7 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) -def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): +def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): entry = open_dbentry(imas.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 diff --git 
a/imas/test/test_exception.py b/imas/test/test_exception.py index 37bebfce..c0b66230 100644 --- a/imas/test/test_exception.py +++ b/imas/test/test_exception.py @@ -4,7 +4,7 @@ from imas.backends.imas_core.imas_interface import ll_interface -def test_catch_al_exception(requires_imas): +def test_catch_al_exception(): # Do something which lets the lowlevel Cython interface throw an ALException # Ensure we can catch it: with pytest.raises(imas.exception.ALException): diff --git a/imas/test/test_ids_ascii_data.py b/imas/test/test_ids_ascii_data.py index d15fecf1..20ae8a66 100644 --- a/imas/test/test_ids_ascii_data.py +++ b/imas/test/test_ids_ascii_data.py @@ -18,7 +18,7 @@ def test_data_exists(): @pytest.fixture -def test_data(requires_imas): +def test_data(): db_entry = imas.training.get_training_db_entry() yield db_entry db_entry.close() diff --git a/imas/test/test_ids_toplevel.py b/imas/test/test_ids_toplevel.py index a5855817..e55bac4d 100644 --- a/imas/test/test_ids_toplevel.py +++ b/imas/test/test_ids_toplevel.py @@ -46,7 +46,7 @@ def test_pretty_print(ids): assert pprint.pformat(ids) == "" -def test_serialize_nondefault_dd_version(requires_imas): +def test_serialize_nondefault_dd_version(): ids = IDSFactory("3.31.0").core_profiles() fill_with_random_data(ids) data = ids.serialize() diff --git a/imas/test/test_ids_validate.py b/imas/test/test_ids_validate.py index 7970c7e2..c3f8f157 100644 --- a/imas/test/test_ids_validate.py +++ b/imas/test/test_ids_validate.py @@ -245,7 +245,7 @@ def test_validate_coordinate_same_as(): (None, True), ], ) -def test_validate_on_put(monkeypatch, env_value, should_validate, requires_imas): +def test_validate_on_put(monkeypatch, env_value, should_validate): dbentry = DBEntry(MEMORY_BACKEND, "test", 1, 1) dbentry.create() ids = dbentry.factory.core_profiles() diff --git a/imas/test/test_latest_dd_autofill.py b/imas/test/test_latest_dd_autofill.py index 6d34b766..6b7fbb6a 100644 --- a/imas/test/test_latest_dd_autofill.py +++ 
b/imas/test/test_latest_dd_autofill.py @@ -55,7 +55,7 @@ def test_latest_dd_autofill(ids_name, backend, worker_id, tmp_path): @pytest.mark.parametrize( "serializer", [ASCII_SERIALIZER_PROTOCOL, FLEXBUFFERS_SERIALIZER_PROTOCOL] ) -def test_latest_dd_autofill_serialize(serializer, ids_name, has_imas): +def test_latest_dd_autofill_serialize(serializer, ids_name): """Serialize and then deserialize again all IDSToplevels""" if serializer is None: pytest.skip("Unsupported serializer") @@ -64,8 +64,6 @@ def test_latest_dd_autofill_serialize(serializer, ids_name, has_imas): ids = factory.new(ids_name) fill_with_random_data(ids) - if not has_imas: - return # rest of the test requires an IMAS install data = ids.serialize(serializer) ids2 = factory.new(ids_name) diff --git a/imas/test/test_lazy_loading.py b/imas/test/test_lazy_loading.py index 4a7c65ca..1dcd0bff 100644 --- a/imas/test/test_lazy_loading.py +++ b/imas/test/test_lazy_loading.py @@ -94,7 +94,7 @@ def iterate(structure): dbentry.close() -def test_lazy_load_close_dbentry(requires_imas): +def test_lazy_load_close_dbentry(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() @@ -109,7 +109,7 @@ def test_lazy_load_close_dbentry(requires_imas): print(lazy_ids.time) -def test_lazy_load_readonly(requires_imas): +def test_lazy_load_readonly(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() run_lazy_load_readonly(dbentry) @@ -151,7 +151,7 @@ def run_lazy_load_readonly(dbentry): dbentry.close() -def test_lazy_load_no_put(requires_imas): +def test_lazy_load_no_put(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() @@ -169,7 +169,7 @@ def test_lazy_load_no_put(requires_imas): dbentry.close() -def test_lazy_load_with_new_aos(requires_imas): +def test_lazy_load_with_new_aos(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") dbentry.create() et = dbentry.factory.edge_transport() @@ -214,7 +214,7 @@ def test_lazy_load_with_new_aos_netcdf(tmp_path): assert 
len(lazy_et.model[0].ggd[0].electrons.particles.d_radial) == 0 -def test_lazy_load_with_new_structure(requires_imas): +def test_lazy_load_with_new_structure(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") dbentry.create() diff --git a/imas/test/test_nbc_change.py b/imas/test/test_nbc_change.py index 91ede0e3..b34949df 100644 --- a/imas/test/test_nbc_change.py +++ b/imas/test/test_nbc_change.py @@ -49,7 +49,7 @@ def test_nbc_structure_to_aos(caplog): assert caplog.record_tuples[0][:2] == ("imas.ids_convert", logging.WARNING) -def test_nbc_0d_to_1d(caplog, requires_imas): +def test_nbc_0d_to_1d(caplog): # channel/filter_spectrometer/radiance_calibration in spectrometer visible changed # from FLT_0D to FLT_1D in DD 3.39.0 ids = IDSFactory("3.32.0").spectrometer_visible() diff --git a/imas/test/test_snippets.py b/imas/test/test_snippets.py index 0574b185..8ed49a83 100644 --- a/imas/test/test_snippets.py +++ b/imas/test/test_snippets.py @@ -13,7 +13,7 @@ @pytest.mark.skip(reason="skipping hli test") @pytest.mark.filterwarnings("ignore:The input coordinates to pcolormesh:UserWarning") @pytest.mark.parametrize("snippet", course_snippets) -def test_script_execution(snippet, monkeypatch, tmp_path, requires_imas): +def test_script_execution(snippet, monkeypatch, tmp_path): monkeypatch.chdir(tmp_path) # Prevent showing plots in a GUI monkeypatch.delenv("DISPLAY", raising=False) diff --git a/imas/test/test_static_ids.py b/imas/test/test_static_ids.py index 2c66811d..05133615 100644 --- a/imas/test/test_static_ids.py +++ b/imas/test/test_static_ids.py @@ -21,7 +21,7 @@ def test_ids_valid_type(): assert ids_types in ({IDSType.NONE}, {IDSType.CONSTANT, IDSType.DYNAMIC}) -def test_constant_ids(caplog, requires_imas): +def test_constant_ids(caplog): ids = imas.IDSFactory().new("amns_data") if ids.metadata.type is IDSType.NONE: pytest.skip("IDS definition has no constant IDSs") diff --git a/imas/test/test_to_xarray.py b/imas/test/test_to_xarray.py index 
1767a6d9..a5df6a1e 100644 --- a/imas/test/test_to_xarray.py +++ b/imas/test/test_to_xarray.py @@ -9,7 +9,7 @@ @pytest.fixture -def entry(requires_imas, monkeypatch): +def entry(monkeypatch): monkeypatch.setenv("IMAS_VERSION", "3.39.0") # Use fixed DD version return imas.training.get_training_db_entry() diff --git a/imas/test/test_util.py b/imas/test/test_util.py index 15a2a8c0..1834af9c 100644 --- a/imas/test/test_util.py +++ b/imas/test/test_util.py @@ -54,7 +54,7 @@ def test_inspect(): inspect(cp.profiles_1d[1].grid.rho_tor_norm) # IDSPrimitive -def test_inspect_lazy(requires_imas): +def test_inspect_lazy(): with get_training_db_entry() as entry: cp = entry.get("core_profiles", lazy=True) inspect(cp) @@ -141,7 +141,7 @@ def test_idsdiffgen(): assert diff[0] == ("profiles_1d/time", -1, 0) -def test_idsdiff(requires_imas): +def test_idsdiff(): # Test the diff rendering for two sample IDSs with get_training_db_entry() as entry: imas.util.idsdiff(entry.get("core_profiles"), entry.get("equilibrium")) @@ -179,7 +179,7 @@ def test_get_toplevel(): assert get_toplevel(cp) is cp -def test_is_lazy_loaded(requires_imas): +def test_is_lazy_loaded(): with get_training_db_entry() as entry: assert is_lazy_loaded(entry.get("core_profiles")) is False assert is_lazy_loaded(entry.get("core_profiles", lazy=True)) is True From 0de0782573cc6c6bbca40bf78de1c5952c4d9e26 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Thu, 15 Jan 2026 08:36:34 +0100 Subject: [PATCH 3/4] Defer loading the default DD definitions (#95) --- imas/backends/db_entry_impl.py | 2 +- imas/db_entry.py | 6 +++--- imas/ids_factory.py | 19 ++++++++++++++++++- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/imas/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py index df1e4638..0c1b2cd6 100644 --- a/imas/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -78,7 +78,7 @@ def get( destination: IDSToplevel, lazy: bool, nbc_map: 
Optional[NBCPathMap], - ) -> None: + ) -> IDSToplevel: """Implement DBEntry.get/get_slice/get_sample. Load data from the data source. Args: diff --git a/imas/db_entry.py b/imas/db_entry.py index 471a50ad..5a470641 100644 --- a/imas/db_entry.py +++ b/imas/db_entry.py @@ -160,7 +160,7 @@ def __init__( legacy = True except TypeError as exc2: raise TypeError( - f"Incorrect arguments to {__class__.__name__}.__init__(): " + "Incorrect arguments to DBEntry.__init__(): " f"{exc1.args[0]}, {exc2.args[0]}" ) from None @@ -561,7 +561,7 @@ def _get( raise RuntimeError("Database entry is not open.") if lazy and destination: raise ValueError("Cannot supply a destination IDS when lazy loading.") - if not self._ids_factory.exists(ids_name): + if autoconvert and not self._ids_factory.exists(ids_name): raise IDSNameError(ids_name, self._ids_factory) # Note: this will raise an exception when the ids/occurrence is not filled: @@ -577,7 +577,7 @@ def _get( ids_name, occurrence, ) - elif dd_version != self.dd_version and dd_version not in dd_xml_versions(): + elif dd_version not in dd_xml_versions() and dd_version != self.dd_version: # We don't know the DD version that this IDS was written with if ignore_unknown_dd_version: # User chooses to ignore this problem, load as if it was stored with diff --git a/imas/ids_factory.py b/imas/ids_factory.py index b840d8a8..5a8209db 100644 --- a/imas/ids_factory.py +++ b/imas/ids_factory.py @@ -41,6 +41,17 @@ def __init__( version: DD version string, e.g. "3.38.1". xml_path: XML file containing data dictionary definition. 
""" + if version is None and xml_path is None: + # Defer loading the DD definitions until we really need them + self.__deferred_init = True + else: + # If a specific version or xml_path is requested, we still load immediately + # so any exceptions are raise when creating the IDSfactory + self.__do_init(version, xml_path) + self.__deferred_init = False + + def __do_init(self, version: str | None, xml_path: str | pathlib.Path | None): + """Actual initialization logic""" self._xml_path = xml_path self._etree = dd_zip.dd_etree(version, xml_path) self._ids_elements = { @@ -71,10 +82,16 @@ def __dir__(self) -> Iterable[str]: return sorted(set(object.__dir__(self)).union(self._ids_elements)) def __getattr__(self, name: str) -> Any: + # Actually initialize when we deferred it before + if self.__deferred_init: + self.__do_init(None, None) + self.__deferred_init = False + return getattr(self, name) + # Check if the name matches any IDS and return a 'constructor' for it if name in self._ids_elements: # Note: returning a partial to mimic AL HLI, e.g. 
factory.core_profiles() return partial(IDSToplevel, self, self._ids_elements[name]) - raise AttributeError(f"{type(self)!r} object has no attribute {name!r}") + raise AttributeError(f"'IDSFactory' has no attribute {name!r}") def __iter__(self) -> Iterator[str]: """Iterate over the IDS names defined by the loaded Data Dictionary""" From fc77ae5c676d0ee98fb2a6cf766427b62439a77c Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Fri, 16 Jan 2026 12:35:43 +0100 Subject: [PATCH 4/4] Convert core/edge to plasma IDS (#87) --- docs/source/api.rst | 16 ++- imas/__init__.py | 58 +++++++--- imas/command/cli.py | 71 +++++++++++- imas/convert_core_edge_plasma.py | 124 +++++++++++++++++++++ imas/test/test_cli.py | 37 +++++- imas/test/test_convert_core_edge_plasma.py | 68 +++++++++++ setup.cfg | 3 - 7 files changed, 351 insertions(+), 26 deletions(-) create mode 100644 imas/convert_core_edge_plasma.py create mode 100644 imas/test/test_convert_core_edge_plasma.py diff --git a/docs/source/api.rst b/docs/source/api.rst index 5df6e579..63e8af41 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -7,16 +7,24 @@ This page provides an auto-generated summary of IMAS-Python's API. For more deta and examples, refer to the relevant chapters in the main part of the documentation. -IMAS-Python IDS manipulation ----------------------------- +IMAS-Python public API +---------------------- .. currentmodule:: imas .. 
autosummary:: + convert_core_edge_plasma.convert_to_plasma_profiles + convert_core_edge_plasma.convert_to_plasma_sources + convert_core_edge_plasma.convert_to_plasma_transport db_entry.DBEntry + ids_convert.convert_ids + ids_data_type.IDSDataType ids_factory.IDSFactory - ids_toplevel.IDSToplevel + ids_identifiers.identifiers + ids_metadata.IDSMetadata + ids_metadata.IDSType ids_primitive.IDSPrimitive - ids_structure.IDSStructure ids_struct_array.IDSStructArray + ids_structure.IDSStructure + ids_toplevel.IDSToplevel \ No newline at end of file diff --git a/imas/__init__.py b/imas/__init__.py index 58a66994..4154b9f6 100644 --- a/imas/__init__.py +++ b/imas/__init__.py @@ -1,30 +1,58 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -# isort: skip_file - from packaging.version import Version as _V -from ._version import version as __version__ # noqa: F401 -from ._version import version_tuple # noqa: F401 - # Import logging _first_ -from . import setup_logging +# isort: off +from . import setup_logging # noqa: F401 + +# isort: on -# Import main user API objects in the imas module +# Ensure that `imas.util` is loaded when importing imas +from . import util # noqa: F401 + +# Public API: +from ._version import version as __version__ +from ._version import version_tuple +from .convert_core_edge_plasma import ( + convert_to_plasma_profiles, + convert_to_plasma_sources, + convert_to_plasma_transport, +) from .db_entry import DBEntry -from .ids_factory import IDSFactory from .ids_convert import convert_ids +from .ids_data_type import IDSDataType +from .ids_factory import IDSFactory from .ids_identifiers import identifiers - -# Load the IMAS-Python IMAS AL/DD core -from . 
import ( - db_entry, - dd_zip, - util, -) +from .ids_metadata import IDSMetadata, IDSType +from .ids_primitive import IDSPrimitive +from .ids_struct_array import IDSStructArray +from .ids_structure import IDSStructure +from .ids_toplevel import IDSToplevel PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" """URL to the published documentation.""" OLDEST_SUPPORTED_VERSION = _V("3.22.0") """Oldest Data Dictionary version that is supported by IMAS-Python.""" + +__all__ = [ + "__version__", + "version_tuple", + "DBEntry", + "IDSDataType", + "IDSFactory", + "IDSMetadata", + "IDSPrimitive", + "IDSStructure", + "IDSStructArray", + "IDSToplevel", + "IDSType", + "convert_ids", + "convert_to_plasma_profiles", + "convert_to_plasma_sources", + "convert_to_plasma_transport", + "identifiers", + "PUBLISHED_DOCUMENTATION_ROOT", + "OLDEST_SUPPORTED_VERSION", +] diff --git a/imas/command/cli.py b/imas/command/cli.py index a270d834..da921973 100644 --- a/imas/command/cli.py +++ b/imas/command/cli.py @@ -1,6 +1,6 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. 
-""" Main CLI entry point """ +"""Main CLI entry point""" import logging import sys @@ -22,7 +22,13 @@ import imas import imas.backends.imas_core.imas_interface -from imas import DBEntry, dd_zip +from imas import ( + DBEntry, + dd_zip, + convert_to_plasma_profiles, + convert_to_plasma_sources, + convert_to_plasma_transport, +) from imas.backends.imas_core.imas_interface import ll_interface from imas.command.db_analysis import analyze_db, process_db_analysis from imas.command.helpers import min_version_guard, setup_rich_log_handler @@ -109,6 +115,23 @@ def print_ids(uri, ids, occurrence, print_all): imas.util.print_tree(ids_obj, not print_all) +def _check_convert_to_plasma_ids(idss_with_occurrences): + """Check if no plasma_ IDS is present when converting a core_ or edge_ IDS.""" + idsnames = {ids_name for ids_name, _ in idss_with_occurrences} + for suffix in ("_profiles", "_sources", "_transport"): + if f"plasma{suffix}" in idsnames: + if f"core{suffix}" in idsnames: + overlap = "core" + elif f"edge{suffix}" in idsnames: + overlap = "edge" + else: + continue + raise RuntimeError( + f"Cannot convert {overlap}{suffix} IDS to plasma{suffix}: " + f"there already exists a plasma{suffix} IDS in the data source." + ) + + @cli.command("convert", no_args_is_help=True) @click.argument("uri_in") @click.argument("dd_version") @@ -127,8 +150,21 @@ def print_ids(uri, ids, occurrence, print_all): is_flag=True, help="Don't add provenance metadata to the converted IDS.", ) +@click.option( + "--convert-to-plasma-ids", + is_flag=True, + help="Convert core/edge profiles/transport/sources to the corresponding plasma IDS", +) def convert_ids( - uri_in, dd_version, uri_out, ids, occurrence, quiet, timeit, no_provenance + uri_in: str, + dd_version: str, + uri_out: str, + ids: str, + occurrence: int, + quiet: bool, + timeit: bool, + no_provenance: bool, + convert_to_plasma_ids: bool, ): """Convert a Data Entry (or a single IDS) to the target DD version. 
@@ -174,6 +210,10 @@ def convert_ids( else: idss_with_occurrences.append((ids_name, occurrence)) + if convert_to_plasma_ids: # Sanity checks for conversion to plasma IDSs + _check_convert_to_plasma_ids(idss_with_occurrences) + next_plasma_occurrence = {"_profiles": 0, "_transport": 0, "_sources": 0} + # Create progress bar and task columns = ( TimeElapsedColumn(), @@ -209,6 +249,31 @@ def convert_ids( provenance_origin_uri=provenance_origin_uri, ) + # Convert to plasma_profiles/plasma_sources/plasma_transport IDS + if convert_to_plasma_ids and ids_name.startswith(("core", "edge")): + suffix = ids_name[4:] + # This branch also matches core_instant_changes: check that suffix is ok + if suffix in next_plasma_occurrence: + logger.info( + "Storing IDS %s/%d as plasma%s/%d", + ids_name, + occurrence, + suffix, + next_plasma_occurrence[suffix], + ) + occurrence = next_plasma_occurrence[suffix] + next_plasma_occurrence[suffix] += 1 + + name2 = f"[bold green]plasma{suffix}[/][green]/{occurrence}[/]" + progress.update(task, description=f"Converting {name} to {name2}") + if suffix == "_profiles": + ids2 = convert_to_plasma_profiles(ids2) + elif suffix == "_sources": + ids2 = convert_to_plasma_sources(ids2) + elif suffix == "_transport": + ids2 = convert_to_plasma_transport(ids2) + name = name2 + # Store in output entry: progress.update(task, description=f"Storing {name}", advance=1) with timer("Put", name): diff --git a/imas/convert_core_edge_plasma.py b/imas/convert_core_edge_plasma.py new file mode 100644 index 00000000..5a13c5a7 --- /dev/null +++ b/imas/convert_core_edge_plasma.py @@ -0,0 +1,124 @@ +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
+"""Logic to convert core/edge IDSs to their corresponding plasma IDS."""
+
+from packaging.version import Version
+
+from imas.ids_toplevel import IDSToplevel
+from imas.ids_factory import IDSFactory
+from imas.exception import IDSNameError
+from imas.ids_convert import DDVersionMap, NBCPathMap, _copy_structure
+
+
+def convert_to_plasma_profiles(
+    core_or_edge_profiles: IDSToplevel, *, deepcopy: bool = False
+) -> IDSToplevel:
+    """Convert a core_profiles or edge_profiles IDS to a plasma_profiles IDS.
+
+    The input IDS must use a Data Dictionary version for which the plasma_profiles IDS
+    exists (3.42.0 or newer).
+
+    Args:
+        core_or_edge_profiles: The core_profiles or edge_profiles IDS to be converted.
+
+    Keyword Args:
+        deepcopy: When True, performs a deep copy of all data. When False (default),
+            numpy arrays are not copied and the converted IDS shares the same underlying
+            data buffers.
+    """
+    return _convert_to_plasma(core_or_edge_profiles, "profiles", deepcopy)
+
+
+def convert_to_plasma_sources(
+    core_or_edge_sources: IDSToplevel, *, deepcopy: bool = False
+) -> IDSToplevel:
+    """Convert a core_sources or edge_sources IDS to a plasma_sources IDS.
+
+    The input IDS must use a Data Dictionary version for which the plasma_sources IDS
+    exists (3.42.0 or newer).
+
+    Args:
+        core_or_edge_sources: The core_sources or edge_sources IDS to be converted.
+
+    Keyword Args:
+        deepcopy: When True, performs a deep copy of all data. When False (default),
+            numpy arrays are not copied and the converted IDS shares the same underlying
+            data buffers.
+    """
+    return _convert_to_plasma(core_or_edge_sources, "sources", deepcopy)
+
+
+def convert_to_plasma_transport(
+    core_or_edge_transport: IDSToplevel, *, deepcopy: bool = False
+) -> IDSToplevel:
+    """Convert a core_transport or edge_transport IDS to a plasma_transport IDS.
+
+    The input IDS must use a Data Dictionary version for which the plasma_transport IDS
+    exists (3.42.0 or newer).
+ + Args: + core_or_edge_transport: The core_transport or edge_transport IDS to be + converted. + + Keyword Args: + deepcopy: When True, performs a deep copy of all data. When False (default), + numpy arrays are not copied and the converted IDS shares the same underlying + data buffers. + """ + return _convert_to_plasma(core_or_edge_transport, "transport", deepcopy) + + +class _CoreEdgePlasmaMap(DDVersionMap): + """Subclass of DDVersionMap to generate an NBCPathMap that is suitable to copy + between a core/edge IDS and the corresponding plasma IDS.""" + + def __init__(self, source, target, factory): + self.ids_name = source + self.old_version = factory._etree + self.new_version = factory._etree + self.version_old = Version(factory.version) + + self.old_to_new = NBCPathMap() + self.new_to_old = NBCPathMap() + + old_ids_object = factory._etree.find(f"IDS[@name='{source}']") + new_ids_object = factory._etree.find(f"IDS[@name='{target}']") + self._build_map(old_ids_object, new_ids_object) + + +def _convert_to_plasma(source: IDSToplevel, suffix: str, deepcopy: bool) -> IDSToplevel: + # Sanity checks for input data + if not isinstance(source, IDSToplevel): + raise TypeError( + f"First argument to convert_to_plasma_{suffix} must be a core_{suffix} or " + f"edge_{suffix} of type IDSToplevel. Got a type {type(source)} instead." + ) + if source.metadata.name not in [f"core_{suffix}", f"edge_{suffix}"]: + raise ValueError( + f"First argument to convert_to_plasma_{suffix} must be a core_{suffix} or " + f"edge_{suffix} IDS. Got a {source.metadata.name} IDS instead." 
+ ) + if source._lazy: + raise NotImplementedError( + "IDS conversion is not implemented for lazy-loaded IDSs" + ) + + # Construct target plasma_{suffix} IDS + factory: IDSFactory = source._parent + try: + target = factory.new(f"plasma_{suffix}") + except IDSNameError: + raise ValueError( + f"Cannot convert {source.metadata.name} IDS to plasma_{suffix}: the source " + f"IDS uses Data Dictionary version {factory.dd_version} which doesn't have " + f"a plasma_{suffix} IDS. Please convert the source IDS to a supported Data " + "Dictionary version using `imas.convert_ids` and try again." + ) from None + + # Leverage existing logic from ids_convert to do the copying + # First construct a map (to handle missing items in the target IDS) + data_map = _CoreEdgePlasmaMap(source.metadata.name, target.metadata.name, factory) + path_map = data_map.old_to_new # old = core/edge, new = plasma IDS + _copy_structure(source, target, deepcopy, path_map) + + return target diff --git a/imas/test/test_cli.py b/imas/test/test_cli.py index 130aa287..e696c056 100644 --- a/imas/test/test_cli.py +++ b/imas/test/test_cli.py @@ -3,7 +3,7 @@ import pytest from click.testing import CliRunner -from imas.command.cli import print_version +from imas.command.cli import print_version, convert_ids from imas.command.db_analysis import analyze_db, process_db_analysis from imas.db_entry import DBEntry from imas.test.test_helpers import fill_with_random_data @@ -100,3 +100,38 @@ def test_db_analysis_csv(tmp_path): wall,ids_properties/version_put/data_dictionary,,1.0,1.0 """ # noqa: E501 (line too long) ) + + +def test_imas_convert_with_plasma(tmp_path): + in_db = tmp_path / "in" + out_db = tmp_path / "out" + with DBEntry(f"imas:hdf5?path={in_db}", "w", dd_version="3.39.0") as entry: + for core_edge in ("core", "edge"): + for suffix in ("profiles", "sources", "transport"): + ids = entry.factory.new(f"{core_edge}_{suffix}") + ids.ids_properties.homogeneous_time = 2 + for i in range(4): + 
ids.ids_properties.comment = f"{core_edge}_{suffix} occurrence {i}" + entry.put(ids, i) + + runner = CliRunner() + with runner.isolated_filesystem(tmp_path): + convert_result = runner.invoke( + convert_ids, + [ + "--convert-to-plasma-ids", + f"imas:hdf5?path={in_db}", + "4.1.0", + f"imas:hdf5?path={out_db}", + ], + ) + assert convert_result.exit_code == 0 + + with DBEntry(f"imas:hdf5?path={out_db}", "r", dd_version="4.1.0") as entry: + for suffix in ("profiles", "sources", "transport"): + for i in range(8): + # We expect 8 occurrences, first 4 core, then 4 edge ones + core_edge = "core" if i < 4 else "edge" + expected_comment = f"{core_edge}_{suffix} occurrence {i % 4}" + ids = entry.get(f"plasma_{suffix}", i) + assert ids.ids_properties.comment == expected_comment diff --git a/imas/test/test_convert_core_edge_plasma.py b/imas/test/test_convert_core_edge_plasma.py new file mode 100644 index 00000000..08d8ca91 --- /dev/null +++ b/imas/test/test_convert_core_edge_plasma.py @@ -0,0 +1,68 @@ +import pytest + +import imas.training +from imas.util import idsdiffgen +from imas.test.test_helpers import fill_with_random_data + + +def assert_equal(core_edge, plasma): + # We only expect the IDS name to be different: + difflist = list(idsdiffgen(core_edge, plasma)) + assert difflist == [("IDS name", core_edge.metadata.name, plasma.metadata.name)] + + +def test_convert_training_core_profiles(): + with imas.training.get_training_db_entry() as entry: + cp = entry.get("core_profiles") + + pp = imas.convert_to_plasma_profiles(cp) + assert_equal(cp, pp) + + +def test_convert_missing_qty(): + cp = imas.IDSFactory("4.1.0").core_profiles() + cp.profiles_1d.resize(1) + cp.profiles_1d[0].ion.resize(1) + cp.profiles_1d[0].ion[0].state.resize(1) + cp.profiles_1d[0].ion[0].state[0].ionization_potential = 0.5 + + pp = imas.convert_to_plasma_profiles(cp) + # check that state[0] is copied, but that it's empty + assert not pp.profiles_1d[0].ion[0].state[0].has_value + + 
+@pytest.mark.parametrize("idsname", ["core_profiles", "edge_profiles"]) +def test_convert_randomly_filled_profiles(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + if idsname == "core_profiles": + # ionization_potential doesn't exist in plasma_profiles in DD 4.1.0. This case + # is tested in test_convert_missing_qty. Unset these variables to avoid a diff: + for profiles in list(ids.profiles_1d) + list(ids.profiles_2d): + for ion in profiles.ion: + for state in ion.state: + del state.ionization_potential + del state.ionization_potential_error_upper + del state.ionization_potential_error_lower + + plasma = imas.convert_to_plasma_profiles(ids) + assert_equal(ids, plasma) + + +@pytest.mark.parametrize("idsname", ["core_sources", "edge_sources"]) +def test_convert_randomly_filled_sources(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + plasma = imas.convert_to_plasma_sources(ids) + assert_equal(ids, plasma) + + +@pytest.mark.parametrize("idsname", ["core_transport", "edge_transport"]) +def test_convert_randomly_filled_transport(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + plasma = imas.convert_to_plasma_transport(ids) + assert_equal(ids, plasma) diff --git a/setup.cfg b/setup.cfg index 8e5dd292..fe8ea370 100644 --- a/setup.cfg +++ b/setup.cfg @@ -11,9 +11,6 @@ exclude= docs max-line-length = 88 per-file-ignores= - # Ignore import errors in __init__.py (import not at top of file; imported but - # unused) - imas/__init__.py:E402,F401 # Lots of CLASSPATHS in this test file: adhering to line length would be less # readable imas/test/test_dd_helpers.py:E501