diff --git a/.ci/collect_mapdl_logs_locals.sh b/.ci/collect_mapdl_logs_locals.sh index 877ea98545..33a187b8d0 100755 --- a/.ci/collect_mapdl_logs_locals.sh +++ b/.ci/collect_mapdl_logs_locals.sh @@ -27,6 +27,7 @@ echo "Copying the log files..." mv ./*.log ./"$LOG_NAMES"/ || echo "No log files could be found" mv ./*apdl.out ./"$LOG_NAMES"/ || echo "No APDL log files could be found" mv ./*pymapdl.apdl ./"$LOG_NAMES"/ || echo "No PYMAPDL APDL log files could be found" +mv /home/mapdl/dpf_logs ./"$LOG_NAMES"/ || echo "No DPF log files could be found" echo "Copying the profiling files..." mkdir -p ./"$LOG_NAMES"/prof diff --git a/.ci/collect_mapdl_logs_remote.sh b/.ci/collect_mapdl_logs_remote.sh index d0e6242a16..97a3bde9fa 100755 --- a/.ci/collect_mapdl_logs_remote.sh +++ b/.ci/collect_mapdl_logs_remote.sh @@ -25,6 +25,7 @@ echo "Collecting MAPDL logs..." (docker exec "$MAPDL_INSTANCE" /bin/bash -c "if compgen -G '$FILE*.log' > /dev/null ;then mv -f /file*.log /mapdl_logs && echo 'Successfully moved log files.'; fi") || echo "Failed to move the 'log' files into a local file" (docker exec "$MAPDL_INSTANCE" /bin/bash -c "if compgen -G '$WDIR*.crash' > /dev/null ;then mv -f $WDIR*.crash /mapdl_logs && echo 'Successfully moved crash files.'; fi") || echo "Failed to move the 'crash' files into a local file" +docker cp "$MAPDL_INSTANCE":/home/mapdl/dpf_logs ./"$LOG_NAMES"/ || echo "Failed to copy the 'dpf_logs' files into a local directory" docker cp "$MAPDL_INSTANCE":/mapdl_logs/. ./"$LOG_NAMES"/. || echo "Failed to copy the 'log-build-docs' files into a local directory" #### diff --git a/.ci/entrypoint.sh b/.ci/entrypoint.sh new file mode 100755 index 0000000000..ca61a1ee21 --- /dev/null +++ b/.ci/entrypoint.sh @@ -0,0 +1,29 @@ +#!/bin/bash +export OMPI_ALLOW_RUN_AS_ROOT=1 +export OMPI_ALLOW_RUN_AS_ROOT_CONFIRM=1 + +if [ -z "${VERSION}" ]; then + echo "VERSION environment variable is not set. Please set it to the desired Ansys version." + exit 1 +fi + +RUN_DPF_SERVER=${RUN_DPF_SERVER:-false} + +if [ -n "${ANSYS_DPF_ACCEPT_LA}" ]; then + if [ "${ANSYS_DPF_ACCEPT_LA}" == "Y" ]; then + RUN_DPF_SERVER=true + fi +fi + +echo "RUN_DPF_SERVER: $RUN_DPF_SERVER" + +if [ "$RUN_DPF_SERVER" == "true" ]; then + echo "Starting DPF server..." + "/ansys_inc/v${VERSION}/aisol/bin/linx64/Ans.Dpf.Grpc.sh" --port "${DPF_PORT_INTERNAL}" > log_dpf.log & + echo "DPF server started." +fi + +echo "Starting MAPDL..." +echo "Using executable path: ${EXEC_PATH}" + +$EXEC_PATH -grpc -dir /jobs -"${DISTRIBUTED_MODE}" -np 2 -db -6000 -m -6000 - \ No newline at end of file diff --git a/.ci/start_mapdl.sh b/.ci/start_mapdl.sh index 102928aa90..07d418fd4e 100755 --- a/.ci/start_mapdl.sh +++ b/.ci/start_mapdl.sh @@ -65,14 +65,24 @@ fi; if [[ $MAPDL_VERSION == *"cicd"* ]] ; then echo "It is a CICD version, binding DPF port too" - export DPF_ARG="-p ${DPF_PORT}:50055" + if [ "$RUN_DPF_SERVER" == "true" ]; then + echo "RUN_DPF_SERVER is set to true, starting DPF server" + export DPF_ON="-e ANSYS_DPF_ACCEPT_LA=Y" + fi + + export DPF_PORT_INTERNAL=50055 + export DPF_PORT_ARG="-p ${DPF_PORT}:${DPF_PORT_INTERNAL}" export DB_INT_PORT=50056 - echo "DPF_ARG: $DPF_ARG" + echo "DPF_PORT_ARG: $DPF_PORT_ARG" echo "DB_INT_PORT: $DB_INT_PORT" + + echo "Overriding DISTRIBUTED_MODE to 'dmp' for CICD version" + export DISTRIBUTED_MODE="dmp" else - export DPF_ARG="" + export DPF_PORT_ARG="" export DB_INT_PORT=50055 + export DPF_ON="" fi; echo "EXEC_PATH: $EXEC_PATH" @@ -90,17 +100,23 @@ run \ --health-start-period=10s \ -e ANSYSLMD_LICENSE_FILE=1055@${LICENSE_SERVER} \ -e ANSYS_LOCK="OFF" \ + ${DPF_ON} \ -p ${PYMAPDL_PORT}:50052 \ -p ${PYMAPDL_DB_PORT}:${DB_INT_PORT} \ - ${DPF_ARG} \ + ${DPF_PORT_ARG} \ + -e VERSION=${VERSION} \ + -e DPF_PORT_INTERNAL=${DPF_PORT_INTERNAL} \ + -e EXEC_PATH=${EXEC_PATH} \ + -e DISTRIBUTED_MODE=${DISTRIBUTED_MODE} \ --shm-size=2gb \ -e I_MPI_SHM_LMT=shm \ - -e P_SCHEMA="$P_SCHEMA" \ + -e P_SCHEMA=${P_SCHEMA} \ -w /jobs \ -u=0:0 \ --memory=6656MB \ --memory-swap=16896MB \ - ${MAPDL_IMAGE} ${EXEC_PATH} -grpc -dir /jobs -${DISTRIBUTED_MODE} -np 2 -db -5000 -m -5000 - + --mount type=bind,src=${PWD}/.ci/entrypoint.sh,dst=/entrypoint.sh \ + ${MAPDL_IMAGE} /entrypoint.sh _EOT_ ) diff --git a/.github/workflows/test-local.yml b/.github/workflows/test-local.yml index a56306d0bd..d5006e1cb4 100644 --- a/.github/workflows/test-local.yml +++ b/.github/workflows/test-local.yml @@ -123,6 +123,7 @@ jobs: PYTEST_ARGUMENTS: '-vvv -ra --color=yes --durations=30 --random-order --random-order-bucket=class --maxfail=10 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html --timeout=180 --profile-svg --profile --report-log-exclude-logs-on-passed-tests --strict-markers' OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + DATAPROCESSING_DEBUG: /home/mapdl/dpf_logs container: image: "${{ inputs.package-registry }}:${{ inputs.mapdl-version }}" diff --git a/.github/workflows/test-remote.yml b/.github/workflows/test-remote.yml index 7a59a435b3..3c59a2c115 100644 --- a/.github/workflows/test-remote.yml +++ b/.github/workflows/test-remote.yml @@ -63,11 +63,12 @@ jobs: PYMAPDL_DB_PORT2: 21003 # default won't work on GitHub runners DPF_DOCKER_IMAGE: ghcr.io/ansys/mapdl:v25.2-rocky-dpf-standalone DPF_PORT: 21014 + DPF_PORT_INTERNAL: 50055 # Internal port for DPF server DPF_PORT2: 21015 DPF_START_SERVER: False HAS_DPF: True TEST_DPF_BACKEND: false - PYTEST_ARGUMENTS: '-vvv -ra --color=yes --durations=30 --random-order --random-order-bucket=class --maxfail=5 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html --timeout=180 --profile-svg --profile --report-log-exclude-logs-on-passed-tests --strict-markers' + PYTEST_ARGUMENTS: '-vvv -ra --color=yes --durations=30 --random-order --random-order-bucket=class --maxfail=10 --reruns 3 --reruns-delay 4 --cov=ansys.mapdl.core --cov-report=html --timeout=180 --profile-svg --profile --report-log-exclude-logs-on-passed-tests --strict-markers --random-order-seed=650199' MAPDL_PACKAGE: ghcr.io/ansys/mapdl steps: @@ -149,13 +150,16 @@ jobs: MAPDL_VERSION: ${{ inputs.mapdl-version }} DISTRIBUTED_MODE: ${{ steps.distributed_mode.outputs.distributed_mode }} MAPDL_PACKAGE: ${{ env.MAPDL_PACKAGE }} + DPF_PORT_INTERNAL: ${{ env.DPF_PORT_INTERNAL }} shell: bash run: | echo "Launching first MAPDL instance..." export INSTANCE_NAME=MAPDL_0 + export RUN_DPF_SERVER=true .ci/start_mapdl.sh &> mapdl_launch_0.log & export DOCKER_PID_0=$! echo "Launching a second instance for MAPDL pool testing..." + export RUN_DPF_SERVER=false export PYMAPDL_PORT=${{ env.PYMAPDL_PORT2 }} export PYMAPDL_DB_PORT=${{ env.PYMAPDL_DB_PORT2 }} export INSTANCE_NAME=MAPDL_1 @@ -222,17 +226,6 @@ jobs: run: | python -m pip install .[tests] - - name: "Start DPF server on same container as MAPDL" - if: ${{ steps.ubuntu_check.outputs.ON_SAME_CONTAINER == 'true' }} - shell: bash - env: - MAPDL_INSTANCE: "MAPDL_0" - DPF_PORT: "${{ env.DPF_PORT }}" - run: | - docker ps - echo "Starting DPF server on same MAPDL container: ${MAPDL_INSTANCE}" - docker exec ${MAPDL_INSTANCE} /bin/bash -c "/ansys_inc/v252/aisol/bin/linx64/Ans.Dpf.Grpc.sh --port 50055 &" > log_dpf.log & - - name: "Waiting for the services to be up" shell: bash env: @@ -265,14 +258,14 @@ jobs: --log-file-level="DEBUG" - name: "Print amount of restarts" + if: always() run: | N_RESTART=$(docker inspect --format '{{ .RestartCount }}' MAPDL_0) - echo "Number of restarts in MAPDL_0 container: $N_RESTART" + echo "Number of restarts in the MAPDL_0 container: $N_RESTART" N_RESTART=$(docker inspect --format '{{ .RestartCount }}' MAPDL_1) - echo "Number of restarts in MAPDL_1 container: $N_RESTART" + echo "Number of restarts in the MAPDL_1 container: $N_RESTART" - name: "Upload pytest reports to GitHub" - if: always() uses: actions/upload-artifact@v4.6.2 with: name: "reports-${{ inputs.file-name }}" diff --git a/doc/changelog.d/1300.maintenance.md b/doc/changelog.d/1300.maintenance.md new file mode 100644 index 0000000000..69852f3c52 --- /dev/null +++ b/doc/changelog.d/1300.maintenance.md @@ -0,0 +1 @@ +Feat: using dpf instead of reader in "results" module \ No newline at end of file diff --git a/doc/changelog.d/4098.miscellaneous.md b/doc/changelog.d/4098.miscellaneous.md new file mode 100644 index 0000000000..9571213165 --- /dev/null +++ b/doc/changelog.d/4098.miscellaneous.md @@ -0,0 +1 @@ +Feat: using entrypoint to start mapdl \ No newline at end of file diff --git a/src/ansys/mapdl/core/__init__.py b/src/ansys/mapdl/core/__init__.py index 7baf833d9a..845a112ec7 100644 --- a/src/ansys/mapdl/core/__init__.py +++ b/src/ansys/mapdl/core/__init__.py @@ -66,13 +66,14 @@ # Import related globals _HAS_ATP: bool = is_installed("ansys.tools.path") _HAS_CLICK: bool = is_installed("click") -_HAS_PIM: bool = is_installed("ansys.platform.instancemanagement") +_HAS_DPF: bool = is_installed("ansys.dpf.core") +_HAS_MATPLOTLIB: bool = is_installed("matplotlib") _HAS_PANDAS: bool = is_installed("pandas") +_HAS_PIM: bool = is_installed("ansys.platform.instancemanagement") _HAS_PYANSYS_REPORT: bool = is_installed("ansys.tools.report") _HAS_PYVISTA: bool = is_installed("pyvista") _HAS_REQUESTS: bool = is_installed("requests") _HAS_TQDM: bool = is_installed("tqdm") -_HAS_MATPLOTLIB: bool = is_installed("matplotlib") _HAS_VISUALIZER: bool = ( is_installed("ansys.tools.visualization_interface") and _HAS_MATPLOTLIB ) diff --git a/src/ansys/mapdl/core/launcher.py b/src/ansys/mapdl/core/launcher.py index e3dde28efa..f0d23aff26 100644 --- a/src/ansys/mapdl/core/launcher.py +++ b/src/ansys/mapdl/core/launcher.py @@ -148,6 +148,7 @@ def version_from_path(*args: Any, **kwargs: Any) -> int | None: "just_launch", "on_pool", "graphics_backend", + "use_reader_backend", ] ON_WSL = os.name == "posix" and ( diff --git a/src/ansys/mapdl/core/mapdl_core.py b/src/ansys/mapdl/core/mapdl_core.py index b5711ba590..35af3b17e2 100644 --- a/src/ansys/mapdl/core/mapdl_core.py +++ b/src/ansys/mapdl/core/mapdl_core.py @@ -43,7 +43,7 @@ from ansys.mapdl import core as pymapdl from ansys.mapdl.core import LOG as logger -from ansys.mapdl.core import _HAS_VISUALIZER +from ansys.mapdl.core import _HAS_DPF, _HAS_VISUALIZER from ansys.mapdl.core.commands import ( CMD_BC_LISTING, CMD_LISTING, @@ -88,6 +88,9 @@ from ansys.mapdl.core.solution import Solution from ansys.mapdl.core.xpl import ansXpl + if _HAS_DPF: + from ansys.mapdl.core.reader import DPFResult + from ansys.mapdl.core.post import PostProcessing MAX_PARAM_CHARS = 32 @@ -211,6 +214,7 @@ "start_instance", "start_timeout", "timeout", + "use_reader_backend", ] @@ -264,7 +268,7 @@ def __init__( local: bool = True, print_com: bool = False, file_type_for_plots: VALID_FILE_TYPE_FOR_PLOT_LITERAL = "PNG", - **start_parm, + **start_parm: dict[str, Any], ): """Initialize connection with MAPDL.""" self._show_matplotlib_figures = True # for testing @@ -285,6 +289,7 @@ def __init__( self._version = None # cached version self._mute = False self._save_selection_obj = None + self._use_reader_backend: bool = start_parm.pop("use_reader_backend", True) if _HAS_VISUALIZER: if graphics_backend is not None: # pragma: no cover @@ -362,6 +367,9 @@ def __init__( self._info = Information(self) + # DPF + self._dpf_result: "DPFResult | None" = None + def _after_run(self, _command: str) -> None: pass @@ -1072,7 +1080,11 @@ def graphics_backend(self, value: GraphicsBackend): @property @requires_package("ansys.mapdl.reader", softerror=True) def result(self): - """Binary interface to the result file using :class:`ansys.mapdl.reader.rst.Result`. + """Binary interface to the result file using ``ansys-dpf-core`` or + ``ansys-mapdl-reader``. + + If `ansys-dpf-core` is not installed, then a :class:`ansys.mapdl.reader.rst.Result` + object is returned. Returns ------- @@ -1106,12 +1118,21 @@ def result(self): NSL : Nodal displacements RF : Nodal reaction forces """ + if _HAS_DPF and not self._use_reader_backend: + from ansys.mapdl.core.reader import DPFResult + + if self._dpf_result is None: + # create a DPFResult object + self._dpf_result = DPFResult(None, mapdl=self, logger=self._log) + + return self._dpf_result + from ansys.mapdl.reader import read_binary from ansys.mapdl.reader.rst import Result if not self._local: # download to temporary directory - save_path = os.path.join(tempfile.gettempdir()) + save_path = tempfile.mkdtemp(suffix=f"ansys_tmp_{random_string()}") result_path = self.download_result(save_path) else: if self._distributed_result_file and self._result_file: @@ -1141,10 +1162,12 @@ def result(self): else: result_path = self._result_file - if result_path is None: - raise FileNotFoundError("No result file(s) at %s" % self.directory) - if not os.path.isfile(result_path): - raise FileNotFoundError("No results found at %s" % result_path) + if result_path is None or not os.path.isfile(result_path): + raise FileNotFoundError( + f"No result file(s) at {self.directory or result_path}. " + "Check that there is at least one RST file in the working directory " + f"'{self.directory}, or solve an MAPDL model to generate one." + ) return read_binary(result_path) @@ -1299,7 +1322,7 @@ def _result_file(self): if os.path.isfile(filename): return filename else: - return f"{filename}.{ext}" + return self.directory / f"{filename}.{ext}" def _wrap_listing_functions(self): # Wrapping LISTING FUNCTIONS. diff --git a/src/ansys/mapdl/core/misc.py b/src/ansys/mapdl/core/misc.py index c7ae2623cc..7c2d3e7859 100644 --- a/src/ansys/mapdl/core/misc.py +++ b/src/ansys/mapdl/core/misc.py @@ -106,6 +106,24 @@ def is_float(input_string: str) -> bool: return False +def get_local_ip(): + """Get the local IP address of this machine. + + It uses a socket to determine the local IP address, if fails, it returns local IP. + """ + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.settimeout(0) + try: + # doesn't even have to be reachable + s.connect(("10.254.254.254", 1)) + ip = s.getsockname()[0] + except Exception: + ip = "127.0.0.1" + finally: + s.close() + return ip + + def random_string(stringLength: int = 10, letters: str = string.ascii_lowercase) -> str: """Generate a random string of fixed length""" import secrets diff --git a/src/ansys/mapdl/core/reader/__init__.py b/src/ansys/mapdl/core/reader/__init__.py new file mode 100644 index 0000000000..d0ad60186c --- /dev/null +++ b/src/ansys/mapdl/core/reader/__init__.py @@ -0,0 +1,23 @@ +# Copyright (C) 2016 - 2025 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from .result import DPFResult diff --git a/src/ansys/mapdl/core/reader/result.py b/src/ansys/mapdl/core/reader/result.py new file mode 100644 index 0000000000..546638642d --- /dev/null +++ b/src/ansys/mapdl/core/reader/result.py @@ -0,0 +1,2889 @@ +# Copyright (C) 2016 - 2025 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from functools import wraps +import os +import pathlib +import socket +import tempfile +from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal +import weakref + +# from ansys.dpf import post +import numpy as np + +from ansys.mapdl.core import _HAS_DPF, _HAS_PYVISTA, LOG, Logger, Mapdl # type: ignore +from ansys.mapdl.core.errors import MapdlRuntimeError +from ansys.mapdl.core.misc import check_valid_ip, get_local_ip, parse_ip_route + +COMPONENTS: list[str] = ["X", "Y", "Z", "XY", "YZ", "XZ"] + +if _HAS_DPF: + from ansys.dpf import core as dpf + from ansys.dpf.core import Model + from ansys.dpf.core.errors import DPFServerException + + +if TYPE_CHECKING and _HAS_PYVISTA: + import pyvista as pv + +LOCATION_MAPPING: dict[str, str] = { + "NODE": "Nodal", + "ELEM": "Elemental", +} + +MATERIAL_PROPERTIES: list[str] = [ + "EX", + "EY", + "EZ", + "ALPX", + "ALPY", + "ALPZ", + "REFT", + "PRXY", + "PRYZ", + "PRX", + "NUXY", + "NUYZ", + "NUXZ", + "GXY", + "GYZ", + "GXZ", + "DAMP", + "MU", + "DENS", + "C", + "ENTH", + "KXX", + "KYY", + "KZZ", + "HF", + "EMIS", + "QRATE", + "VISC", + "SONC", + "RSVX", + "RSVY", + "RSVZ", + "PERX", + "PERY", + "PERZ", + "MURX", + "MURY", + "MURZ", + "MGXX", + "MGYY", + "MGZZ", + "XTEN", + "XCMP", + "YTEN", + "YCMP", + "ZTEN", + "ZCMP", + "XY", + "YZ", + "XZ", + "XYCP", + "YZCP", + "XZCP", + "XZIT", + "XZIC", + "YZIT", + "YZIC", +] + + +NOT_AVAILABLE_METHOD = """The method '{method}' has not been ported to the new DPF-based Results backend. +If you still want to use it, you can switch to 'pymapdl-reader' backend.""" + + +class ResultNotFound(MapdlRuntimeError): + """Results not found""" + + def __init__(self, msg: str = ""): + MapdlRuntimeError.__init__(self, msg) + + +def update_result(function: Callable[..., Any]) -> Callable[..., Any]: + """ + Decorator to wrap :class:`DPFResult ` + methods to force update the RST when accessed the first time. + + Parameters + ---------- + update : bool, optional + If ``True``, the class information is updated by calling ``/STATUS`` + before accessing the methods. By default ``False`` + """ + + @wraps(function) + def wrapper(self, *args, **kwargs): + if self._update_required or not self._loaded or self._cached_dpf_model is None: + self.update() + self.logger.debug("RST file updated.") + return function(self, *args, **kwargs) + + return wrapper + + +class DPFResult: + """ + Result object based on DPF library. + + + This class replaces the class Result in PyMAPDL-Reader. + + The + + Parameters + ---------- + rst_file_path : str + Path to the RST file. + + mapdl : _MapdlCore + Mapdl instantiated object. + + rst_is_on_remote : bool, optional + If True, the RST file is located on the remote server already. + If False, the RST file is located on the local machine, and it will be + uploaded to the DPF server + + """ + + def __init__( + self, + rst_file_path: str | None = None, + mapdl: "Mapdl | None" = None, + rst_is_on_remote: bool = False, + logger: Logger | None = None, + ) -> None: + """Initialize Result instance""" + + if not _HAS_DPF: + raise ModuleNotFoundError( + "The DPF library is not installed. Please install it using 'pip install ansys-dpf-core'." + ) + + self._mapdl_weakref: weakref.ref["Mapdl"] | None = None + self._server_file_path: str | None = None # In case DPF is remote. + self._logger: Logger | None = logger + + # RST parameters + self.__rst_directory: str | None = None + self.__rst_name: str | None = None + self._mode_rst: bool + + if rst_file_path is not None and mapdl is not None: + raise ValueError( + "Only one the arguments must be supplied: 'rst_file_path' or 'mapdl'." + ) + + elif rst_file_path is not None: + # Using RST file only allows for one RST file at the time. + if not rst_is_on_remote and not os.path.exists(rst_file_path): + raise FileNotFoundError( + f"The RST file '{rst_file_path}' could not be found." + ) + elif rst_is_on_remote: + self._server_file_path = rst_file_path + + self.logger.debug("Initializing DPFResult class in RST mode.") + self._mode_rst = True + + self.__rst_directory = os.path.dirname(rst_file_path) + self.__rst_name = os.path.basename(rst_file_path) + + elif mapdl is not None: + # Using MAPDL instance allows to switch between RST files. + if not isinstance(mapdl, Mapdl): # pragma: no cover # type: ignore + raise TypeError("Must be initialized using Mapdl instance") + + self.logger.debug("Initializing DPFResult class in MAPDL mode.") + self._mapdl_weakref = weakref.ref(mapdl) + self._mode_rst = False + + else: + raise ValueError( + "One of the following kwargs must be supplied: 'rst_file_path' or 'mapdl'" + ) + + # dpf + # If True, it triggers a update on the RST file + self._update_required: bool = False + self._loaded: bool = False + self._cached_dpf_model: "dpf.Model" | None = None + self._connected: bool = False + self._server: dpf.server_types.BaseServer | None = None + self._tmp_dir: str | None = ( + None # Temporary directory to store the RST file locally + ) + self.__mapdl_and_dpf_on_same_machine: bool | None = ( + None # True if the DPF server is running on the same machine as MAPDL + ) + self._dpf_is_remote: bool | None = None # Whether DPF is remote or not + self._dpf_ip: str | None = None + self._rst_is_on_remote: bool = rst_is_on_remote + + # old attributes + # ELEMENT_INDEX_TABLE_KEY = None # todo: To fix + # ELEMENT_RESULT_NCOMP = None # todo: to fix + + # Let's try to delay the loading of the RST file until the first access + # self._update() # Loads the RST file and sets the dpf model + + def _get_is_remote(self) -> bool: + """Check if the DPF server is running on a remote machine.""" + if not hasattr(self.server, "ip"): + return False + + own_ip = get_local_ip() + dpf_ip = self.server.ip if self.server else "" + return own_ip != dpf_ip + + def _get_is_same_machine(self) -> bool | None: + """ + Check if the MAPDL and the DPF instances are running on the same machine. + + """ + if self.mapdl is None: + self.logger.warning( + "MAPDL instance is not provided. Cannot determine if MAPDL and DPF are running on the same machine." + ) + return None + else: + mapdl = self.mapdl + + # The 'ifconfig' output is reliable in terms of order of the IP address, + # however it is not installed by default on all systems. + # The 'hostname -I' command is more widely available, but it may return + # multiple IP addresses, hence we are going to try both. + cmds = [ + r"ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1'", + "hostname -I | cut -d' ' -f1", + ] + mapdl_ip = None + for cmd in cmds: + if output := mapdl.sys(cmd): + # If the command returns an IP address, it means MAPDL is running on a local machine. + mapdl_ip = parse_ip_route(output) + if check_valid_ip(mapdl_ip): + break + + self.logger.debug( + f"MAPDL IP address determined as: {mapdl_ip} using command: {cmd}" + ) + self._mapdl_ip = mapdl_ip or mapdl.ip + self.logger.debug(f"Using MAPDL IP address: {self._mapdl_ip}") + + # Get DPF server IP + dpf_ip = self.dpf_ip + + if mapdl_ip != dpf_ip: + self.logger.debug( + f"DPF server IP ({dpf_ip}) is different from MAPDL IP ({mapdl_ip})." + ) + return False + + # Check MAPDL can find the route + mapdl_version = str(mapdl.version).replace(".", "") # Version as 252 + awp_root = ( + mapdl.inquire("", "env", f"AWP_ROOT{mapdl_version}") + or f"/ansys_inc/v{mapdl_version}" + ) + + dpf_executable = f"{awp_root}/aisol/bin/linx64/Ans.Dpf.Grpc.exe" + if mapdl.inquire("", "exist", dpf_executable): + self.logger.debug( + f"DPF executable found at {dpf_executable}. MAPDL and DPF are running on the same machine." + ) + return True + else: + self.logger.debug( + f"DPF executable not found at {dpf_executable}. MAPDL and DPF are NOT running on the same machine." + ) + return False + + def _connect_to_dpf_using_mode( + self, + mode: Literal["InProcess", "LocalGrpc", "RemoteGrpc"] = "InProcess", + external_ip: str | None = None, + external_port: int | None = None, + ): + if mode == "InProcess": + dpf.server.set_server_configuration( + dpf.server_factory.AvailableServerConfigs.InProcessServer + ) + srvr = dpf.server.start_local_server() + elif mode == "LocalGrpc": + dpf.server.set_server_configuration( + dpf.server_factory.AvailableServerConfigs.GrpcServer + ) + srvr = dpf.server.start_local_server() + elif mode == "RemoteGrpc": + dpf.server.set_server_configuration( + dpf.server_factory.AvailableServerConfigs.GrpcServer + ) + if external_ip is not None and external_port is not None: + srvr = dpf.server.connect_to_server(ip=external_ip, port=external_port) + else: + raise ValueError( + "external_ip and external_port should be provided for RemoteGrpc communication" + ) + + self._server = srvr + + def _get_dpf_ip(self) -> str: + return self.server.ip if self.server and hasattr(self.server, "ip") else "" + + @property + def dpf_ip(self) -> str: + if self._dpf_ip is None: + self._dpf_ip = self._get_dpf_ip() + return self._dpf_ip + + @property + def server(self) -> "dpf.server_types.BaseServer": + """ + Return the DPF server connection. + + Returns + ------- + dpf.server_types.BaseServer + The DPF server connection. + """ + if self._server is None: + self.connect_to_server() + + return self._server + + @property + def rst_is_on_remote(self) -> bool: + return self._rst_is_on_remote + + def _try_connect_inprocess(self) -> None: + try: + self._connect_to_dpf_using_mode(mode="InProcess") + self._connected = True + self.logger.debug("Connected to DPF server using InProcess.") + except DPFServerException: # type: ignore # probably should filter a bit here + self._connected = False + + def _try_connect_localgrpc(self) -> None: + try: + self._connect_to_dpf_using_mode(mode="LocalGrpc") + self._connected = True + self.logger.debug("Connected to DPF server using LocalGrpc.") + except DPFServerException: # type: ignore # probably should filter a bit here + self._connected = False + + def _try_connect_remote_grpc(self, dpf_ip: str, dpf_port: int) -> None: + try: + self._connect_to_dpf_using_mode( + mode="RemoteGrpc", external_ip=dpf_ip, external_port=dpf_port + ) + self._connected = True + self.logger.debug( + f"Connected to DPF server using RemoteGrpc on {dpf_ip}:{dpf_port}." + ) + except DPFServerException: # type: ignore + self._connected = False + + def _iterate_connections(self, dpf_ip: str, dpf_port: int) -> None: + if not self._connected: + self._try_connect_remote_grpc(dpf_ip, dpf_port) + + if not self._connected: + self._try_connect_inprocess() + + if not self._connected: + self._try_connect_localgrpc() + + if self._connected: + return + else: + raise DPFServerException( + "Could not connect to DPF server after trying all the available options." + ) + + def _get_dpf_env_vars( + self, ip: str | None = None, port: int | None = None + ) -> tuple[str, int]: + if ip is not None: + dpf_ip = ip + elif "DPF_IP" in os.environ: + dpf_ip = os.environ["DPF_IP"] + elif self.mapdl: + dpf_ip = self.mapdl.ip + else: + dpf_ip = "127.0.0.1" + + if port is not None: + dpf_port = port + elif "DPF_PORT" in os.environ: + dpf_port = int(os.environ["DPF_PORT"]) + elif self.mapdl: + dpf_port = self.mapdl.port + 3 + else: + dpf_port = 50055 + + return dpf_ip, dpf_port + + def _connect_to_dpf(self, ip: str, port: int) -> None: + if not self._mode_rst and self._mapdl and not self._mapdl.is_local: + self.logger.debug("Connecting to a remote gRPC DPF server") + self._try_connect_remote_grpc(ip, port) + + else: + # any connection method is supported because the file local. + self.logger.debug("Attempting any connection method") + self._iterate_connections(ip, port) + + def connect_to_server(self, ip: str | None = None, port: int | None = None) -> None: + """ + Connect to the DPF Server. + + Parameters + ---------- + ip : str, optional + IP address of the server, by default "127.0.0.1" + port : int, optional + Server Port, by default 50054 + + Returns + ------- + dpf.server_types.GrpcServer + Return the server connection. + + Raises + ------ + MapdlRuntimeError + If it cannot connect to an instance at the specified IP and port. + + Notes + ----- + You can also set the ``ip`` and ``port`` values using the environment variables + ``DPF_PORT`` and ``DPF_IP``. + In case these variables are set, and the inputs of this function are not ``None``, + the priority order is: + + 1. Values supplied to this function. + 2. The environment variables + 3. The MAPDL stored values (if working on MAPDL mode) + 3. The default values + """ + + ip, port = self._get_dpf_env_vars(ip, port) + + # resolve ip + ip = socket.gethostbyname(ip) + + check_valid_ip(ip) + + self._connect_to_dpf(ip, port) + + def _dpf_remote_envvars(self): + """Return True if any of the env variables are set""" + return "DPF_IP" in os.environ or "DPF_PORT" in os.environ + + @property + def dpf_is_remote(self) -> bool: + """Returns True if we are connected to the DPF Server using a gRPC connection to a remote IP.""" + if self._dpf_is_remote is None: + self._dpf_is_remote = self._get_is_remote() + return self._dpf_is_remote + + @property + def _mapdl(self) -> "Mapdl | None": + """Return the weakly referenced instance of MAPDL""" + if self._mapdl_weakref: + return self._mapdl_weakref() + + @property + def mapdl(self): + """Return the MAPDL instance""" + return self._mapdl + + @property + def _log(self) -> Logger: + """Alias for mapdl logger""" + if self._logger is None: + self._logger = LOG + return self._logger + + @property + def logger(self) -> Logger: + """Logger property""" + return self._log + + @logger.setter + def logger(self, logger: Logger) -> None: + if self.mode_mapdl: + raise ValueError( + "Cannot set logger in MAPDL mode. Use the MAPDL instance methods to set the logger instead." + ) + self._logger = logger + + @property + def mode(self): + return "RST" if self._mode_rst else "MAPDL" + + @property + def mode_rst(self): + return bool(self._mode_rst) + + @property + def mode_mapdl(self): + return not self._mode_rst + + @property + def _mapdl_dpf_on_same_machine(self): + """True if the DPF server is running on the same machine as MAPDL""" + if self.__mapdl_and_dpf_on_same_machine is None: + self.__mapdl_and_dpf_on_same_machine = self._get_is_same_machine() + return self.__mapdl_and_dpf_on_same_machine + + @property + def _is_thermal(self): + """Return True if there are TEMP DOF in the solution.""" + return hasattr(self.model.results, "temperature") + + @property + def _is_distributed(self): + # raise NotImplementedError("To be implemented by DPF") + return False # Hardcoded until DPF exposure + + @property + def is_distributed(self): + """True when this result file is part of a distributed result + + Only True when Global number of nodes does not equal the + number of nodes in this file. + + Notes + ----- + Not a reliabile indicator if a cyclic result. + """ + return self._is_distributed + + @property + def _rst(self): + if self.mode_mapdl: + # because it might be remote + return self.mapdl.result_file + + else: + return os.path.join(self._rst_directory, self._rst_name) + + @property + def mapdl_is_local(self): + if self.mapdl: + return self._mapdl.is_local + + @property + def _rst_directory(self) -> str: + if self.mapdl: + self.__rst_directory = os.path.dirname(self.mapdl.result_file) # type: ignore + return self.__rst_directory # type: ignore + + @property + def _rst_name(self) -> str: + if self.mapdl: + # update always + self.__rst_name = os.path.basename(self.mapdl.result_file) + return self.__rst_name + + def update( + self, progress_bar: bool | None = None, chunk_size: int | None = None + ) -> None: + """Update the DPF Model + + Parameters + ---------- + progress_bar : bool, optional + If True, display a progress bar during the update process. If None, the default behavior is used. + + chunk_size : int, optional + Number of items to process per chunk. If None, the default chunk size is used. + """ + return self._update(progress_bar=progress_bar, chunk_size=chunk_size) + + def _update( + self, progress_bar: bool | None = None, chunk_size: int | None = None + ) -> None: + if self.mode_mapdl: + self._update_rst(progress_bar=progress_bar, chunk_size=chunk_size) + + # Upload it to DPF if we are not in local + if self.dpf_is_remote and not self._mapdl_dpf_on_same_machine: + self._upload_to_dpf() + elif self.dpf_is_remote and not self.rst_is_on_remote: + # If the RST is not on the remote server, we need to upload it + self._upload_to_dpf() + + # Updating model + self._build_dpf_object() + + # Resetting flag + self._loaded = True + self._update_required = False + + def _upload_to_dpf(self): + if self.mode_mapdl and self._mapdl_dpf_on_same_machine is True: + self._log.debug("Updating server file path for DPF model.") + self._server_file_path = os.path.join( + self._mapdl.directory, self._mapdl.result_file + ) + elif self.mode_rst and not self.dpf_is_remote: + self._server_file_path = self._rst + else: + # Upload to DPF is broken on Ubuntu: https://github.com/ansys/pydpf-core/issues/2254 + # self._server_file_path = dpf.upload_file_in_tmp_folder(self._rst) + raise NotImplementedError( + "Uploading to DPF is not implemented yet. " + "Please use the local mode for now." + ) + + def _update_rst( + self, + progress_bar: bool | None = None, + chunk_size: int | None = None, + save: bool = True, + ) -> None: + """Update RST from MAPDL instance + + Parameters + ---------- + progress_bar: bool + Whether print or not the progress bar during the RST file uploading + + chunk_size: int + The value of the size of the chunk used to upload the file. + + save: bool + Whether save the model or not before update the RST file + """ + # Saving model + if save: + self.mapdl.save() # type: ignore + + if self.mapdl_is_local: + rst_file_exists = os.path.exists(self._rst) + else: + rst_file_exists = self.mapdl.inquire("", "exist", self._rst) + + if not rst_file_exists: + raise FileNotFoundError( + f"The result file could not be found in {self.mapdl.directory}" + ) + + if self.mapdl_is_local is False and self._mapdl_dpf_on_same_machine is False: + self._log.debug("Updating the local copy of remote RST file.") + # download file + self._tmp_dir = tempfile.gettempdir() + self.mapdl.download( # type: ignore + self._rst, + self._tmp_dir, + progress_bar=progress_bar, + chunk_size=chunk_size, + ) + + def _build_dpf_object(self): + if self._log: + self._log.debug("Building/Updating DPF Model object.") + + if self.dpf_is_remote and not self._mapdl_dpf_on_same_machine: + rst = self._server_file_path + else: + rst = self._rst + + self._cached_dpf_model = Model(str(rst)) + + @property + def model(self): + """Returns the DPF model object.""" + if self._cached_dpf_model is None or self._update_required: + self._update() + + return self._cached_dpf_model + + @property + def metadata(self) -> "dpf.model.Metadata": + return self.model.metadata + + @property + def mesh(self) -> "dpf.MeshedRegion": + """Mesh from result file.""" + # TODO: this should be a class equivalent to reader.mesh class. + return self.model.metadata.meshed_region + + @property + def grid(self) -> "pv.UnstructuredGrid": + return self.mesh.grid + + def _get_entities_ids( + self, + entities: str | int | float | Iterable[str | int | float], + entity_type: str = "Nodal", + ) -> Iterable[int | float]: + """Get entities ids given their ids, or component names. + + If a list is given it checks can be int, floats, or list/tuple of int/floats, or + components (strs, or iterable[strings]) + + Parameters + ---------- + entities : str | int | float | Iterable[str | int | float] + Entities ids or components. If a mix of strings and numbers is + provided in the iterable, a ValueError will be raised. + + entity_type : str, optional + Type of entity, by default "Nodal" + + Returns + ------- + list + List of entities ids. + + Raises + ------ + ValueError + The argument 'entity_type' can only be 'Nodal' or 'Elemental' + TypeError + Only ints, floats, strings or iterable of the previous ones are allowed. + ValueError + The named selection '{each_named_selection}' does not exist. + ValueError + The named selection '{each_named_selection}' does not contain {entity_type} information. + """ + if entity_type.lower() not in ["nodal", "elemental"]: + raise ValueError( + "The argument 'entity_type' can only be 'Nodal' or 'Elemental'. " + ) + else: + entity_type = entity_type.title() # Sanity check + + if entities is None: + return entities + + elif isinstance(entities, (int, float, str)): + entities = [entities] + + if isinstance(entities, Iterable): # type: ignore + if all([isinstance(each, (int, float)) for each in entities]): + return entities # type: ignore + elif all([isinstance(each, str) for each in entities]): + # Need to map the components to the ids. + pass + else: + raise ValueError("Strings and numbers are not allowed together.") + + else: + raise TypeError( + "Only ints, floats, strings or iterable of the previous ones are allowed." + ) + + # For components selections: + entities_: list[int] = [] + available_ns: list[str] = self.mesh.available_named_selections + + for each_named_selection in entities: + if each_named_selection not in available_ns: + raise ValueError( + f"The named selection '{each_named_selection}' does not exist." + ) + + scoping = self.mesh.named_selection(each_named_selection) + if scoping.location != entity_type: + raise ValueError( + f"The named selection '{each_named_selection}' does not contain {entity_type} information." + ) + + entities_.extend(scoping.ids.tolist()) + + return entities_ + + def _get_principal(self, op: "dpf.Operator") -> np.ndarray[Any, Any]: + fc: dpf.FieldsContainer = op.outputs.fields_as_fields_container()[ + 0 + ] # This index 0 is the step indexing. + + op1 = dpf.operators.invariant.principal_invariants() + op1.inputs.field.connect(fc) + # Get output data + result_field_eig_1 = op.outputs.field_eig_1() + result_field_eig_2 = op.outputs.field_eig_2() + result_field_eig_3 = op.outputs.field_eig_3() + + op2 = dpf.operators.invariant.invariants() + op2.inputs.field.connect(fc) + + # Get output data + result_field_int = op.outputs.field_int() + result_field_eqv = op.outputs.field_eqv() + # result_field_max_shear = op.outputs.field_max_shear() + + return np.hstack( + ( + result_field_eig_1, + result_field_eig_2, + result_field_eig_3, + result_field_int, + result_field_eqv, + ) + ) + + def _extract_data( + self, op: "dpf.Operator" + ) -> tuple[np.ndarray[Any, Any], np.ndarray[Any, Any]]: + fc = op.outputs.fields_as_fields_container()[ + 0 + ] # This index 0 is the step indexing. + + # When we destroy the operator, we might lose access to the array, that is why we copy. + ids = fc.scoping.ids.copy() + data = fc.data.copy() + return ids, data + + def _set_rescope(self, op: "dpf.Operator", scope_ids: list[int]) -> "dpf.Operator": + fc = op.outputs.fields_container() + + rescope = dpf.operators.scoping.rescope() + rescope.inputs.mesh_scoping(sorted(scope_ids)) + rescope.inputs.fields(fc) + return rescope + + def _set_mesh_scoping( + self, + op: "dpf.Operator", + mesh: "dpf.MeshedRegion", + requested_location: Literal["nodal", "elemental_nodal", "elemental"], + scope_ids: list[int] | None = None, + ): + + scop = dpf.Scoping() + requested_location = requested_location.lower() # type: ignore + + if requested_location == "nodal": + scop.location = dpf.locations.nodal + scop.ids = scope_ids if scope_ids else mesh.nodes.scoping.ids + + elif requested_location == "elemental_nodal": + if scope_ids: + scop.ids = scope_ids + else: + scop.ids = mesh.elements.scoping.ids + + elif requested_location == "elemental": + scop.location = dpf.locations.elemental + if scope_ids: + scop.ids = scope_ids + else: + scop.ids = mesh.elements.scoping.ids + else: + raise ValueError( + f"The 'requested_location' value ({requested_location}) is not allowed." + ) + op.inputs.mesh_scoping.connect(scop) + return scop.ids + + def _set_element_results(self, op, mesh): + + fc = op.outputs.fields_container() + + op2 = dpf.operators.averaging.to_elemental_fc(collapse_shell_layers=True) + op2.inputs.fields_container.connect(fc) + op2.inputs.mesh.connect(mesh) + + return op2 + + def _set_input_timestep_scope(self, op, rnum): + + if not rnum: + rnum = [int(1)] + elif isinstance(rnum, (int, float)): + rnum = [rnum] + elif isinstance(rnum, (list, tuple)): + rnum = [self.parse_step_substep(rnum)] + else: + raise TypeError("Only 'int' and 'float' are supported to define the steps.") + + my_time_scoping = dpf.Scoping() + my_time_scoping.location = "time_freq_steps" # "time_freq" + my_time_scoping.ids = rnum + + op.inputs.time_scoping.connect(my_time_scoping) + + def _get_operator(self, result_field): + if not hasattr(self.model.results, result_field): + list_results = "\n ".join( + [each for each in dir(self.model.results) if not each.startswith("_")] + ) + raise ResultNotFound( + f"The result '{result_field}' cannot be found on the RST file. " + f"The current results are:\n {list_results}" + ) + + # Getting field + return getattr(self.model.results, result_field)() + + def _get_nodes_result( + self, + rnum, + result_type, + in_nodal_coord_sys=False, + nodes=None, + return_operator=False, + ): + return self._get_result( + rnum, + result_type, + requested_location="Nodal", + scope_ids=nodes, + result_in_entity_cs=in_nodal_coord_sys, + return_operator=return_operator, + ) + + def _get_elem_result( + self, + rnum, + result_type, + in_element_coord_sys=False, + elements=None, + return_operator=False, + ): + return self._get_result( + rnum, + result_type, + requested_location="Elemental", + scope_ids=elements, + result_in_entity_cs=in_element_coord_sys, + return_operator=return_operator, + ) + + def _get_elemnodal_result( + self, + rnum, + result_type, + in_element_coord_sys=False, + elements=None, + return_operator=False, + ): + return self._get_result( + rnum, + result_type, + requested_location="Elemental_Nodal", + scope_ids=elements, + result_in_entity_cs=in_element_coord_sys, + return_operator=return_operator, + ) + + @update_result + def _get_result( + self, + rnum, + result_field, + requested_location="Nodal", + scope_ids=None, + result_in_entity_cs=False, + return_operator=False, + ): + """ + Get elemental/nodal/elementalnodal results. + + Parameters + ---------- + rnum : int + Result step/set + result_field : str + Result type, for example "stress", "strain", "displacement", etc. + requested_location : str, optional + Results given at which type of entity, by default "Nodal" + scope_ids : Union([int, floats, List[int]]), optional + List of entities (nodal/elements) to get the results from, by default None + result_in_entity_cs : bool, optional + Obtain the results in the entity coordinate system, by default False + return_operator : bool, optional + Return the last used operator (most of the times it will be a Rescope operator). + Defaults to ``False``. + + Returns + ------- + np.array + Ids of the entities (nodes or elements) + np.array + Values of the entities for the requested solution + dpf.Operator + If ``return_operator`` is ``True``, then it will return the last instantiated + operator (most of the times a + + `Rescope operator`_ + + :class:`rescope ` + .) + + Raises + ------ + ResultNotFound + The given result (stress, strain, ...) could not be found in the RST file. + TypeError + Only floats and ints are allowed to scope steps/time. + NotImplementedError + Component input selection is still not supported. + """ + + # todo: accepts components in nodes. + mesh: dpf.MeshedRegion = self.metadata.meshed_region + + if isinstance(scope_ids, np.ndarray): + scope_ids = scope_ids.tolist() + + op = self._get_operator(result_field) + + # CS output + if not result_in_entity_cs: + op.inputs.bool_rotate_to_global.connect(True) + else: + op.inputs.bool_rotate_to_global.connect(False) + + # Setting time steps + self._set_input_timestep_scope(op, rnum) + + # getting the ids of the entities scope + scope_ids = self._get_entities_ids(scope_ids, requested_location) + + # Set type of return + ids = self._set_mesh_scoping(op, mesh, requested_location, scope_ids) + + if requested_location.lower() == "elemental": + op = self._set_element_results( + op, mesh + ) # overwrite op to be the elemental results OP + + # Applying rescope to make sure the order is right + if not isinstance(ids, list): + ids = ids.astype(int).tolist() + + op = self._set_rescope(op, ids) + + return op if return_operator else self._extract_data(op) + + def nodal_displacement(self, rnum, in_nodal_coord_sys=None, nodes=None): + """Returns the DOF solution for each node in the global + cartesian coordinate system or nodal coordinate system. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + in_nodal_coord_sys : bool, optional + When ``True``, returns results in the nodal coordinate + system. Default ``False``. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : int np.ndarray + Node numbers associated with the results. + + result : float np.ndarray + Array of nodal displacements. Array + is (``nnod`` x ``sumdof``), the number of nodes by the + number of degrees of freedom which includes ``numdof`` and + ``nfldof`` + + Examples + -------- + Return the nodal solution (in this case, displacement) for the + first result of ``"file.rst"`` + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, data = rst.nodal_solution(0) + + Return the nodal solution just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, data = rst.nodal_solution(0, nodes='MY_COMPONENT') + + Return the nodal solution just for the nodes from 20 through 50. + + >>> nnum, data = rst.nodal_solution(0, nodes=range(20, 51)) + + Notes + ----- + Some solution results may not include results for each node. + These results are removed by and the node numbers of the + solution results are reflected in ``nnum``. + """ + return self._get_nodes_result(rnum, "displacement", in_nodal_coord_sys, nodes) + + def nodal_solution( + self, rnum, in_nodal_coord_sys=None, nodes=None, return_temperature=False + ): + """Returns the DOF solution for each node in the global + cartesian coordinate system or nodal coordinate system. + + Solution may be nodal temperatures or nodal displacements + depending on the type of the solution. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + in_nodal_coord_sys : bool, optional + When ``True``, returns results in the nodal coordinate + system. Default ``False``. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + return_temperature: bool, optional + When ``True``, returns the nodal temperature instead of + the displacement. Default ``False``. + + Returns + ------- + nnum : int np.ndarray + Node numbers associated with the results. + + result : float np.ndarray + Array of nodal displacements or nodal temperatures. Array + is (``nnod`` x ``sumdof``), the number of nodes by the + number of degrees of freedom which includes ``numdof`` and + ``nfldof`` + + Examples + -------- + Return the nodal solution (in this case, displacement) for the + first result of ``"file.rst"`` + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, data = rst.nodal_solution(0) + + Return the nodal solution just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, data = rst.nodal_solution(0, nodes='MY_COMPONENT') + + Return the nodal solution just for the nodes from 20 through 50. + + >>> nnum, data = rst.nodal_solution(0, nodes=range(20, 51)) + + Notes + ----- + Some solution results may not include results for each node. + These results are removed by and the node numbers of the + solution results are reflected in ``nnum``. + """ + + if hasattr(self.model.results, "displacement") and not return_temperature: + return self.nodal_displacement(rnum, in_nodal_coord_sys, nodes) + elif hasattr(self.model.results, "temperature"): + return self.nodal_temperature(rnum, nodes) + else: + raise ResultNotFound( + "The current analysis does not have 'displacement' or 'temperature' results." + ) + + def nodal_temperature(self, rnum, nodes=None): + """Retrieves the temperature for each node in the + solution. + + The order of the results corresponds to the sorted node + numbering. + + Equivalent MAPDL command: PRNSOL, TEMP + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : numpy.ndarray + Node numbers of the result. + + temperature : numpy.ndarray + Temperature at each node. + + Examples + -------- + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, temp = rst.nodal_temperature(0) + + Return the temperature just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, temp = rst.nodal_stress(0, nodes='MY_COMPONENT') + + Return the temperature just for the nodes from 20 through 50. + + >>> nnum, temp = rst.nodal_solution(0, nodes=range(20, 51)) + + """ + return self._get_nodes_result(rnum, "temperature", nodes) + + def nodal_voltage(self, rnum, in_nodal_coord_sys=None, nodes=None): + """Retrieves the voltage for each node in the + solution. + + The order of the results corresponds to the sorted node + numbering. + + Equivalent MAPDL command: PRNSOL, VOLT + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : numpy.ndarray + Node numbers of the result. + + voltage : numpy.ndarray + voltage at each node. + + Examples + -------- + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, temp = rst.nodal_voltage(0) + + Return the voltage just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, temp = rst.nodal_stress(0, nodes='MY_COMPONENT') + + """ + return self._get_nodes_result( + rnum, "electric_potential", in_nodal_coord_sys, nodes + ) + + def element_stress( + self, rnum, principal=None, in_element_coord_sys=None, elements=None, **kwargs + ): + """Retrieves the element component stresses. + + Equivalent ANSYS command: PRESOL, S + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + principal : bool, optional + Returns principal stresses instead of component stresses. + Default False. + + in_element_coord_sys : bool, optional + Returns the results in the element coordinate system. + Default False and will return the results in the global + coordinate system. + + elements : str, sequence of int or str, optional + Select a limited subset of elements. Can be a element + component or array of element numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + **kwargs : optional keyword arguments + Hidden options for distributed result files. + + Returns + ------- + enum : np.ndarray + ANSYS element numbers corresponding to each element. + + element_stress : list + Stresses at each element for each node for Sx Sy Sz Sxy + Syz Sxz or SIGMA1, SIGMA2, SIGMA3, SINT, SEQV when + principal is True. + + enode : list + Node numbers corresponding to each element's stress + results. One list entry for each element. + + Examples + -------- + Element component stress for the first result set. + + >>> rst.element_stress(0) + + Element principal stress for the first result set. + + >>> enum, element_stress, enode = result.element_stress(0, principal=True) + + Notes + ----- + Shell stresses for element 181 are returned for top and bottom + layers. Results are ordered such that the top layer and then + the bottom layer is reported. + """ + if principal: + op = self._get_elem_result( + rnum, + "stress", + in_element_coord_sys=in_element_coord_sys, + elements=elements, + return_operator=True, + **kwargs, + ) + return self._get_principal(op) + return self._get_elem_result( + rnum, "stress", in_element_coord_sys, elements, **kwargs + ) + + def element_nodal_stress( + self, rnum, principal=None, in_element_coord_sys=None, elements=None, **kwargs + ): + """Retrieves the nodal stresses for each element. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a list containing + (step, substep) of the requested result. + + principal : bool, optional + Returns principal stresses instead of component stresses. + Default False. + + in_element_coord_sys : bool, optional + Returns the results in the element coordinate system if ``True``. + Else, it returns the results in the global coordinate system. + Default False + + elements : str, sequence of int or str, optional + Select a limited subset of elements. Can be a element + component or array of element numbers. For example: + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + **kwargs : optional keyword arguments + Hidden options for distributed result files. + + Returns + ------- + enum : np.ndarray + ANSYS element numbers corresponding to each element. + + element_stress : list + Stresses at each element for each node for Sx Sy Sz Sxy + Syz Sxz or SIGMA1, SIGMA2, SIGMA3, SINT, SEQV when + principal is True. + + enode : list + Node numbers corresponding to each element's stress + results. One list entry for each element. + + Examples + -------- + Element component stress for the first result set. + + >>> rst.element_stress(0) + + Element principal stress for the first result set. + + >>> enum, element_stress, enode = result.element_stress(0, principal=True) + + Notes + ----- + Shell stresses for element 181 are returned for top and bottom + layers. Results are ordered such that the top layer and then + the bottom layer is reported. + """ + if principal: + op = self._get_elemnodal_result( + rnum, + "stress", + in_element_coord_sys=in_element_coord_sys, + elements=elements, + return_operator=True, + **kwargs, + ) + return self._get_principal(op) + return self._get_elemnodal_result( + rnum, "stress", in_element_coord_sys, elements, **kwargs + ) + + def nodal_elastic_strain(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Nodal component elastic strains. This record contains + strains in the order ``X, Y, Z, XY, YZ, XZ, EQV``. + + Elastic strains can be can be nodal values extrapolated from + the integration points or values at the integration points + moved to the nodes. + + Equivalent MAPDL command: ``PRNSOL, EPEL`` + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : np.ndarray + MAPDL node numbers. + + elastic_strain : np.ndarray + Nodal component elastic strains. Array is in the order + ``X, Y, Z, XY, YZ, XZ, EQV``. + + .. versionchanged:: 0.64 + The nodes with no values are now equals to zero. + The results of the midnodes are also calculated and + presented. + + Examples + -------- + Load the nodal elastic strain for the first result. + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, elastic_strain = rst.nodal_elastic_strain(0) + + Return the nodal elastic strain just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, elastic_strain = rst.nodal_elastic_strain(0, nodes='MY_COMPONENT') + + Return the nodal elastic strain just for the nodes from 20 through 50. + + >>> nnum, elastic_strain = rst.nodal_elastic_strain(0, nodes=range(20, 51)) + + Notes + ----- + Nodes without a strain will be NAN. + + .. + """ + return self._get_nodes_result( + rnum, "elastic_strain", in_nodal_coord_sys=in_nodal_coord_sys, nodes=nodes + ) + + def nodal_plastic_strain(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Nodal component plastic strains. + + This record contains strains in the order: + ``X, Y, Z, XY, YZ, XZ, EQV``. + + Plastic strains are always values at the integration points + moved to the nodes. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : np.ndarray + MAPDL node numbers. + + plastic_strain : np.ndarray + Nodal component plastic strains. Array is in the order + ``X, Y, Z, XY, YZ, XZ, EQV``. + + Examples + -------- + Load the nodal plastic strain for the first solution. + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, plastic_strain = rst.nodal_plastic_strain(0) + + Return the nodal plastic strain just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, plastic_strain = rst.nodal_plastic_strain(0, nodes='MY_COMPONENT') + + Return the nodal plastic strain just for the nodes from 20 + through 50. + + >>> nnum, plastic_strain = rst.nodal_plastic_strain(0, nodes=range(20, 51)) + + """ + return self._get_nodes_result(rnum, "plastic_strain", in_nodal_coord_sys, nodes) + + def nodal_acceleration(self, rnum, in_nodal_coord_sys=None, nodes=None): + """Nodal velocities for a given result set. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + in_nodal_coord_sys : bool, optional + When ``True``, returns results in the nodal coordinate + system. Default False. + + Returns + ------- + nnum : int np.ndarray + Node numbers associated with the results. + + result : float np.ndarray + Array of nodal accelerations. Array is (``nnod`` x + ``sumdof``), the number of nodes by the number of degrees + of freedom which includes ``numdof`` and ``nfldof`` + + Examples + -------- + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, data = rst.nodal_acceleration(0) + + Notes + ----- + Some solution results may not include results for each node. + These results are removed by and the node numbers of the + solution results are reflected in ``nnum``. + """ + return self._get_nodes_result(rnum, "acceleration", in_nodal_coord_sys, nodes) + + def nodal_reaction_forces(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Nodal reaction forces. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + Returns + ------- + rforces : np.ndarray + Nodal reaction forces for each degree of freedom. + + nnum : np.ndarray + Node numbers corresponding to the reaction forces. Node + numbers may be repeated if there is more than one degree + of freedom for each node. + + dof : np.ndarray + Degree of freedom corresponding to each node using the + MAPDL degree of freedom reference table. See + ``rst.result_dof`` for the corresponding degrees of + freedom for a given solution. + + Examples + -------- + Get the nodal reaction forces for the first result and print + the reaction forces of a single node. + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> rforces, nnum, dof = rst.nodal_reaction_forces(0) + >>> dof_ref = rst.result_dof(0) + >>> rforces[:3], nnum[:3], dof[:3], dof_ref + (array([ 24102.21376091, -109357.01854005, 22899.5303263 ]), + array([4142, 4142, 4142]), + array([1, 2, 3], dtype=int32), + ['UX', 'UY', 'UZ']) + + """ + return self._get_nodes_result(rnum, "reaction_force", in_nodal_coord_sys, nodes) + + def nodal_stress(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Retrieves the component stresses for each node in the + solution. + + The order of the results corresponds to the sorted node + numbering. + + Computes the nodal stress by averaging the stress for each + element at each node. Due to the discontinuities across + elements, stresses will vary based on the element they are + evaluated from. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : numpy.ndarray + Node numbers of the result. + + stress : numpy.ndarray + Stresses at ``X, Y, Z, XY, YZ, XZ`` averaged at each corner + node. + + Examples + -------- + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, stress = rst.nodal_stress(0) + + Return the nodal stress just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, stress = rst.nodal_stress(0, nodes='MY_COMPONENT') + + Return the nodal stress just for the nodes from 20 through 50. + + >>> nnum, stress = rst.nodal_solution(0, nodes=range(20, 51)) + + Notes + ----- + Nodes without a stress value will be NAN. + Equivalent ANSYS command: PRNSOL, S + """ + return self._get_nodes_result(rnum, "stress", in_nodal_coord_sys, nodes) + + def nodal_thermal_strain(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Nodal component thermal strain. + + This record contains strains in the order X, Y, Z, XY, YZ, XZ, + EQV, and eswell (element swelling strain). Thermal strains + are always values at the integration points moved to the + nodes. + + Equivalent MAPDL command: PRNSOL, EPTH, COMP + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : np.ndarray + MAPDL node numbers. + + thermal_strain : np.ndarray + Nodal component plastic strains. Array is in the order + ``X, Y, Z, XY, YZ, XZ, EQV, ESWELL`` + + Examples + -------- + Load the nodal thermal strain for the first solution. + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, thermal_strain = rst.nodal_thermal_strain(0) + + Return the nodal thermal strain just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, thermal_strain = rst.nodal_thermal_strain(0, nodes='MY_COMPONENT') + + Return the nodal thermal strain just for the nodes from 20 through 50. + + >>> nnum, thermal_strain = rst.nodal_thermal_strain(0, nodes=range(20, 51)) + """ + return self._get_nodes_result(rnum, "thermal_strain", in_nodal_coord_sys, nodes) + + def nodal_velocity(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Nodal velocities for a given result set. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + in_nodal_coord_sys : bool, optional + When ``True``, returns results in the nodal coordinate + system. Default False. + + Returns + ------- + nnum : int np.ndarray + Node numbers associated with the results. + + result : float np.ndarray + Array of nodal velocities. Array is (``nnod`` x + ``sumdof``), the number of nodes by the number of degrees + of freedom which includes ``numdof`` and ``nfldof`` + + Examples + -------- + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, data = rst.nodal_velocity(0) + + Notes + ----- + Some solution results may not include results for each node. + These results are removed by and the node numbers of the + solution results are reflected in ``nnum``. + """ + return self._get_nodes_result(rnum, "velocity", in_nodal_coord_sys, nodes) + + def nodal_static_forces(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Return the nodal forces averaged at the nodes. + + Nodal forces are computed on an element by element basis, and + this method averages the nodal forces for each element for + each node. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + nodes : str, sequence of int or str, optional + Select a limited subset of nodes. Can be a nodal + component or array of node numbers. For example + + * ``"MY_COMPONENT"`` + * ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + * ``np.arange(1000, 2001)`` + + Returns + ------- + nnum : np.ndarray + MAPDL node numbers. + + forces : np.ndarray + Averaged nodal forces. Array is sized ``[nnod x numdof]`` + where ``nnod`` is the number of nodes and ``numdof`` is the + number of degrees of freedom for this solution. + + Examples + -------- + Load the nodal static forces for the first result using the + example hexahedral result file. + + >>> from ansys.mapdl import reader as pymapdl_reader + >>> from ansys.mapdl.reader import examples + >>> rst = pymapdl_reader.read_binary(examples.rstfile) + >>> nnum, forces = rst.nodal_static_forces(0) + + Return the nodal static forces just for the nodal component + ``'MY_COMPONENT'``. + + >>> nnum, forces = rst.nodal_static_forces(0, nodes='MY_COMPONENT') + + Return the nodal static forces just for the nodes from 20 through 50. + + >>> nnum, forces = rst.nodal_static_forces(0, nodes=range(20, 51)) + + Notes + ----- + Nodes without a a nodal will be NAN. These are generally + midside (quadratic) nodes. + """ + return self._get_nodes_result(rnum, "nodal_force", in_nodal_coord_sys, nodes) + + def principal_nodal_stress(self, rnum, in_nodal_coord_sys=False, nodes=None): + """Computes the principal component stresses for each node in + the solution. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + Returns + ------- + nodenum : numpy.ndarray + Node numbers of the result. + + pstress : numpy.ndarray + Principal stresses, stress intensity, and equivalent stress. + [sigma1, sigma2, sigma3, sint, seqv] + + Examples + -------- + Load the principal nodal stress for the first solution. + + >>> from ansys.mapdl.core.reader import DPFResult as Result + >>> rst = Result('file.rst') + >>> nnum, stress = rst.principal_nodal_stress(0) + + Notes + ----- + ANSYS equivalent of: + PRNSOL, S, PRIN + + which returns: + S1, S2, S3 principal stresses, SINT stress intensity, and SEQV + equivalent stress. + + Internal averaging algorithm averages the component values + from the elements at a common node and then calculates the + principal using the averaged value. + + See the MAPDL ``AVPRIN`` command for more details. + ``ansys-mapdl-reader`` uses the default ``AVPRIN, 0`` option. + + """ + op = self._get_nodes_result( + rnum, + "stress", + in_nodal_coord_sys=in_nodal_coord_sys, + nodes=nodes, + return_operator=True, + ) + return self._get_principal(op) + + @property + def n_results(self): + """Number of results""" + return self.model.metadata.result_info.n_results + + @property + def filename(self) -> str: + """String form of the filename. This property is read-only.""" + return self._rst # in the reader, this contains the complete path. + + @property + def pathlib_filename(self) -> pathlib.Path: + """Return the ``pathlib.Path`` version of the filename. This property can not be set.""" + return pathlib.Path(self._rst) + + @property + def nsets(self): + return self.metadata.time_freq_support.n_sets + + def parse_step_substep(self, user_input): + """Converts (step, substep) to a cumulative index""" + if isinstance(user_input, int): + return self.metadata.time_freq_support.get_cumulative_index( + user_input + ) # 0 based indexing + + elif isinstance(user_input, (list, tuple)): + return self.metadata.time_freq_support.get_cumulative_index( + user_input[0], user_input[1] + ) + + else: + raise TypeError("Input must be either an int or a list") + + @property + def version(self): + """The version of MAPDL used to generate this result file. + + Examples + -------- + >>> mapdl.result.version + 20.1 + """ + return float(self.model.metadata.result_info.solver_version) + + @property + def available_results(self): + """Available result types. + + .. versionchanged:: 0.64 + From 0.64, the MAPDL data labels (i.e. NSL for nodal displacements, + ENS for nodal stresses, etc) are not included in the output of this command. + + Examples + -------- + >>> mapdl.result.available_results + Available Results: + Nodal Displacement + Nodal Velocity + Nodal Acceleration + Nodal Force + ElementalNodal Element nodal Forces + ElementalNodal Stress + Elemental Volume + Elemental Energy-stiffness matrix + Elemental Hourglass Energy + Elemental thermal dissipation energy + Elemental Kinetic Energy + Elemental co-energy + Elemental incremental energy + ElementalNodal Strain + ElementalNodal Thermal Strains + ElementalNodal Thermal Strains eqv + ElementalNodal Swelling Strains + ElementalNodal Temperature + Nodal Temperature + ElementalNodal Heat flux + ElementalNodal Heat flux + """ + text = "Available Results:\n" + for each_available_result in self.model.metadata.result_info.available_results: + text += ( # TODO: Missing label data NSL, VSL, etc + each_available_result.native_location + + " " + + each_available_result.physical_name + + "\n" + ) + return text + + @property + def n_sector(self): + """Number of sectors""" + if self.model.metadata.result_info.has_cyclic: + return self.model.metadata.result_info.cyclic_support.num_sectors() + + @property + def num_stages(self): + """Number of cyclic stages in the model""" + if self.model.metadata.result_info.has_cyclic: + return self.model.metadata.result_info.cyclic_support.num_stages + + @property + def title(self): + """Title of model in database""" + return self.model.metadata.result_info.main_title + + @property + def is_cyclic(self): + return self.model.metadata.result_info.has_cyclic + + @property + def units(self): + return self.model.metadata.result_info.unit_system_name + + def __repr__(self): + if self.is_distributed: + rst_info = ["PyMAPDL Reader Distributed Result"] + else: + rst_info = ["PyMAPDL Result"] + + rst_info.append("{:<12s}: {:s}".format("title".capitalize(), self.title)) + # rst_info.append("{:<12s}: {:s}".format("subtitle".capitalize(), self.subtitle)) #TODO: subtitle is not implemented in DPF. + rst_info.append("{:<12s}: {:s}".format("units".capitalize(), self.units)) + + rst_info.append("{:<12s}: {}".format("Version", self.version)) + rst_info.append("{:<12s}: {}".format("Cyclic", self.is_cyclic)) + rst_info.append("{:<12s}: {:d}".format("Result Sets", self.nsets)) + + rst_info.append("{:<12s}: {:d}".format("Nodes", self.mesh.nodes.n_nodes)) + rst_info.append( + "{:<12s}: {:d}".format("Elements", self.mesh.elements.n_elements) + ) + + rst_info.append("\n") + rst_info.append(self.available_results) + return "\n".join(rst_info) + + def nodal_time_history(self, solution_type="NSL", in_nodal_coord_sys=None): + """The DOF solution for each node for all result sets. + + The nodal results are returned returned in the global + cartesian coordinate system or nodal coordinate system. + + Parameters + ---------- + solution_type: str, optional + The solution type. Must be either nodal displacements + (``'NSL'``), nodal velocities (``'VEL'``) or nodal + accelerations (``'ACC'``). + + in_nodal_coord_sys : bool, optional + When ``True``, returns results in the nodal coordinate system. + Default ``False``. + + Returns + ------- + nnum : int np.ndarray + Node numbers associated with the results. + + result : float np.ndarray + Nodal solution for all result sets. Array is sized + ``rst.nsets x nnod x Sumdof``, which is the number of + time steps by number of nodes by degrees of freedom. + """ + if not isinstance(solution_type, str): + raise TypeError("Solution type must be a string") + + if solution_type == "NSL": + func = self.nodal_solution + elif solution_type == "VEL": + func = self.nodal_velocity + elif solution_type == "ACC": + func = self.nodal_acceleration + else: + raise ValueError( + "Argument 'solution type' must be either 'NSL', " "'VEL', or 'ACC'" + ) + + # size based on the first result + nnum, sol = func(0, in_nodal_coord_sys) + data = np.empty((self.nsets, sol.shape[0], sol.shape[1]), np.float64) + data[0] = sol + for i in range(1, self.nsets): + data[i] = func(i, in_nodal_coord_sys)[1] + + return nnum, data + + @property + def time_values(self): + "Values for the time/frequency" + return self.metadata.time_freq_support.time_frequencies.data_as_list + + @property + def materials(self) -> dict[int, dict[str, int | float]]: + """Result file material properties. + + Returns + ------- + dict + Dictionary of Materials. Keys are the material numbers, + and each material is a dictionary of the material + properties of that material with only the valid entries filled. + + Notes + ----- + Material properties: + + - EX : Elastic modulus, element x direction (Force/Area) + - EY : Elastic modulus, element y direction (Force/Area) + - EZ : Elastic modulus, element z direction (Force/Area) + - ALPX : Coefficient of thermal expansion, element x direction (Strain/Temp) + - ALPY : Coefficient of thermal expansion, element y direction (Strain/Temp) + - ALPZ : Coefficient of thermal expansion, element z direction (Strain/Temp) + - REFT : Reference temperature (as a property) [TREF] + - PRXY : Major Poisson's ratio, x-y plane + - PRYZ : Major Poisson's ratio, y-z plane + - PRX Z : Major Poisson's ratio, x-z plane + - NUXY : Minor Poisson's ratio, x-y plane + - NUYZ : Minor Poisson's ratio, y-z plane + - NUXZ : Minor Poisson's ratio, x-z plane + - GXY : Shear modulus, x-y plane (Force/Area) + - GYZ : Shear modulus, y-z plane (Force/Area) + - GXZ : Shear modulus, x-z plane (Force/Area) + - DAMP : K matrix multiplier for damping [BETAD] (Time) + - MU : Coefficient of friction (or, for FLUID29 and FLUID30 + elements, boundary admittance) + - DENS : Mass density (Mass/Vol) + - C : Specific heat (Heat/Mass*Temp) + - ENTH : Enthalpy (e DENS*C d(Temp)) (Heat/Vol) + - KXX : Thermal conductivity, element x direction + (Heat*Length / (Time*Area*Temp)) + - KYY : Thermal conductivity, element y direction + (Heat*Length / (Time*Area*Temp)) + - KZZ : Thermal conductivity, element z direction + (Heat*Length / (Time*Area*Temp)) + - HF : Convection (or film) coefficient (Heat / (Time*Area*Temp)) + - EMIS : Emissivity + - QRATE : Heat generation rate (MASS71 element only) (Heat/Time) + - VISC : Viscosity (Force*Time / Length2) + - SONC : Sonic velocity (FLUID29 and FLUID30 elements only) (Length/Time) + - RSVX : Electrical resistivity, element x direction (Resistance*Area / Length) + - RSVY : Electrical resistivity, element y direction (Resistance*Area / Length) + - RSVZ : Electrical resistivity, element z direction (Resistance*Area / Length) + - PERX : Electric permittivity, element x direction (Charge2 / (Force*Length)) + - PERY : Electric permittivity, element y direction (Charge2 / (Force*Length)) + - PERZ : Electric permittivity, element z direction (Charge2 / (Force*Length)) + - MURX : Magnetic relative permeability, element x direction + - MURY : Magnetic relative permeability, element y direction + - MURZ : Magnetic relative permeability, element z direction + - MGXX : Magnetic coercive force, element x direction (Charge / (Length*Time)) + - MGYY : Magnetic coercive force, element y direction (Charge / (Length*Time)) + - MGZZ : Magnetic coercive force, element z direction (Charge / (Length*Time)) + + Materials may contain the key ``"stress_failure_criteria"``, which + contains failure criteria information for temperature-dependent stress + limits. This includes the following keys: + + - XTEN : Allowable tensile stress or strain in the x-direction. (Must + be positive.) + + - XCMP : Allowable compressive stress or strain in the + x-direction. (Defaults to negative of XTEN.) + + - YTEN : Allowable tensile stress or strain in the y-direction. (Must + be positive.) + + - YCMP : Allowable compressive stress or strain in the + y-direction. (Defaults to negative of YTEN.) + + - ZTEN : Allowable tensile stress or strain in the z-direction. (Must + be positive.) + + - ZCMP : Allowable compressive stress or strain in the + z-direction. (Defaults to negative of ZTEN.) + + - XY : Allowable XY stress or shear strain. (Must be positive.) + + - YZ : Allowable YZ stress or shear strain. (Must be positive.) + + - XZ : Allowable XZ stress or shear strain. (Must be positive.) + + - XYCP : XY coupling coefficient (Used only if Lab1 = S). Defaults to -1.0. [1] + + - YZCP : YZ coupling coefficient (Used only if Lab1 = S). Defaults to -1.0. [1] + + - XZCP : XZ coupling coefficient (Used only if Lab1 = S). Defaults to -1.0. [1] + + - XZIT : XZ tensile inclination parameter for Puck failure index (default = + 0.0) + + - XZIC : XZ compressive inclination parameter for Puck failure index + (default = 0.0) + + - YZIT : YZ tensile inclination parameter for Puck failure index + (default = 0.0) + + - YZIC : YZ compressive inclination parameter for Puck failure index + (default = 0.0) + + Examples + -------- + Return the material properties from the example result + file. Note that the keys of ``rst.materials`` is the material + type. + + >>> from ansys.mapdl import reader as pymapdl_reader + >>> from ansys.mapdl.reader import examples + >>> rst = pymapdl_reader.read_binary(examples.rstfile) + >>> rst.materials + {1: {'EX': 16900000.0, 'NUXY': 0.31, 'DENS': 0.00041408}} + + """ + mats = self.mesh.property_field("mat") + mat_prop = dpf.operators.result.mapdl_material_properties() + mat_prop.inputs.materials.connect(mats) + + mat_prop.connect(0, MATERIAL_PROPERTIES) + mat_prop.inputs.data_sources.connect(self.model) + prop_field = mat_prop.outputs.properties_value.get_data() + + # Obtaining materials ids + mat_ids: set[int] = set() + for prop in prop_field: + mat_ids = mat_ids.union(prop.scoping.ids.tolist()) + + # Building dictionary of materials + mats = {} + for mat_id in mat_ids: + mats[mat_id] = {} + + for each_label in prop_field.labels: + field = prop_field.get_fields({each_label: 1})[0] + data = field.data.tolist() + + if data and len(data) > 0 and data[0] != 0: + mats[mat_id][each_label] = data[0] if len(data) == 1 else data + return mats + + def plot_nodal_stress( + self, + rnum, + comp=None, + show_displacement=False, + displacement_factor=1, + node_components=None, + element_components=None, + sel_type_all=True, + treat_nan_as_zero=True, + **kwargs, + ): + """Plots the stresses at each node in the solution. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + comp : str, optional + Stress component to display. Available options: + - ``"X"`` + - ``"Y"`` + - ``"Z"`` + - ``"XY"`` + - ``"YZ"`` + - ``"XZ"`` + + node_components : list, optional + Accepts either a string or a list strings of node + components to plot. For example: + ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + + element_components : list, optional + Accepts either a string or a list strings of element + components to plot. For example: + ``['MY_COMPONENT', 'MY_OTHER_COMPONENT]`` + + sel_type_all : bool, optional + If node_components is specified, plots those elements + containing all nodes of the component. Default ``True``. + + treat_nan_as_zero : bool, optional + Treat NAN values (i.e. stresses at midside nodes) as zero + when plotting. + + kwargs : keyword arguments + Additional keyword arguments. See ``help(pyvista.plot)`` + + Returns + ------- + cpos : list + 3 x 3 vtk camera position. + + Examples + -------- + Plot the X component nodal stress while showing displacement. + + >>> rst.plot_nodal_stress(0, comp='x', show_displacement=True) + """ + # if not comp: + # comp = "X" + + # ind = COMPONENTS.index(comp) + + # op = self._get_nodes_result( + # rnum, + # "stress", + # nodes=node_components, + # in_nodal_coord_sys=False, + # return_operator=True, + # ) + # fc = op.outputs.fields_as_fields_container()[0] + + raise NotImplementedError("WIP") + + @property + def _elements(self): + return self.mesh.elements.scoping.ids + + def element_lookup(self, element_id): + """Index of the element within the result mesh""" + mapping = dict(zip(self._elements, np.arange(self.mesh.elements.n_elements))) + if element_id not in mapping: + raise KeyError( + f"Element ID {element_id} not found in the result mesh. " + f"Available element IDs: {list(mapping.keys())}" + ) + + return mapping[element_id] + + def overwrite_element_solution_record(self, data, rnum, solution_type, element_id): + """Overwrite element solution record. + + This method replaces solution data for of an element at a + result index for a given solution type. The number of items + in ``data`` must match the number of items in the record. + + If you are not sure how many records are in a given record, + use ``element_solution_data`` to retrieve all the records for + a given ``solution_type`` and check the number of items in the + record. + + Note: The record being replaced cannot be a compressed record. + If the result file uses compression (default sparse + compression as of 2019R1), you can disable this within MAPDL + with: + ``/FCOMP, RST, 0`` + + Parameters + ---------- + data : list or np.ndarray + Data that will replace the existing records. + + rnum : int + Zero based result number. + + solution_type : str + Element data type to overwrite. + + - EMS: misc. data + - ENF: nodal forces + - ENS: nodal stresses + - ENG: volume and energies + - EGR: nodal gradients + - EEL: elastic strains + - EPL: plastic strains + - ECR: creep strains + - ETH: thermal strains + - EUL: euler angles + - EFX: nodal fluxes + - ELF: local forces + - EMN: misc. non-sum values + - ECD: element current densities + - ENL: nodal nonlinear data + - EHC: calculated heat generations + - EPT: element temperatures + - ESF: element surface stresses + - EDI: diffusion strains + - ETB: ETABLE items + - ECT: contact data + - EXY: integration point locations + - EBA: back stresses + - ESV: state variables + - MNL: material nonlinear record + + element_id : int + Ansys element number (e.g. ``1``) + + Examples + -------- + Overwrite the elastic strain record for element 1 for the + first result with random data. + + >>> from ansys.mapdl import reader as pymapdl_reader + >>> rst = pymapdl_reader.read_binary('file.rst') + >>> data = np.random.random(56) + >>> rst.overwrite_element_solution_data(data, 0, 'EEL', 1) + """ + raise NotImplementedError( + NOT_AVAILABLE_METHOD.format(method="overwrite_element_solution_record") + ) + + def overwrite_element_solution_records(self, element_data, rnum, solution_type): + """Overwrite element solution record. + + This method replaces solution data for a set of elements at a + result index for a given solution type. The number of items + in ``data`` must match the number of items in the record. + + If you are not sure how many records are in a given record, + use ``element_solution_data`` to retrieve all the records for + a given ``solution_type`` and check the number of items in the + record. + + Note: The record being replaced cannot be a compressed record. + If the result file uses compression (default sparse + compression as of 2019R1), you can disable this within MAPDL + with: + ``/FCOMP, RST, 0`` + + Parameters + ---------- + element_data : dict + Dictionary of results that will replace the existing records. + + rnum : int + Zero based result number. + + solution_type : str + Element data type to overwrite. + + - EMS: misc. data + - ENF: nodal forces + - ENS: nodal stresses + - ENG: volume and energies + - EGR: nodal gradients + - EEL: elastic strains + - EPL: plastic strains + - ECR: creep strains + - ETH: thermal strains + - EUL: euler angles + - EFX: nodal fluxes + - ELF: local forces + - EMN: misc. non-sum values + - ECD: element current densities + - ENL: nodal nonlinear data + - EHC: calculated heat generations + - EPT: element temperatures + - ESF: element surface stresses + - EDI: diffusion strains + - ETB: ETABLE items + - ECT: contact data + - EXY: integration point locations + - EBA: back stresses + - ESV: state variables + - MNL: material nonlinear record + + Examples + -------- + Overwrite the elastic strain record for elements 1 and 2 with + for the first result with random data. + + >>> from ansys.mapdl import reader as pymapdl_reader + >>> rst = pymapdl_reader.read_binary('file.rst') + >>> data = {1: np.random.random(56), + 2: np.random.random(56)} + >>> rst.overwrite_element_solution_data(data, 0, 'EEL') + """ + raise NotImplementedError( + NOT_AVAILABLE_METHOD.format(method="overwrite_element_solution_records") + ) + + def read_record(self, pointer, return_bufsize=False): + """Reads a record at a given position. + + Because ANSYS 19.0+ uses compression by default, you must use + this method rather than ``np.fromfile``. + + Parameters + ---------- + pointer : int + ANSYS file position (n words from start of file). A word + is four bytes. + + return_bufsize : bool, optional + Returns the number of words read (includes header and + footer). Useful for determining the new position in the + file after reading a record. + + Returns + ------- + record : np.ndarray + The record read as a ``n x 1`` numpy array. + + bufsize : float, optional + When ``return_bufsize`` is enabled, returns the number of + words read. + + """ + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="read_record")) + + def text_result_table(self, rnum): + """Returns a text result table for plotting""" + raise NotImplementedError( + NOT_AVAILABLE_METHOD.format(method="text_result_table") + ) + + def write_tables(self, filename: str | pathlib.Path): + """Write binary tables to ASCII. Assumes int32 + + Parameters + ---------- + filename : str, pathlib.Path + Filename to write the tables to. + + Examples + -------- + >>> rst.write_tables('tables.txt') + """ + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="write_tables")) + + def cs_4x4(self, cs_cord, as_vtk_matrix=False): + """Return a 4x4 transformation matrix for a given coordinate system. + + Parameters + ---------- + cs_cord : int + Coordinate system index. + + as_vtk_matrix : bool, default: False + Return the transformation matrix as a ``vtkMatrix4x4``. + + Returns + ------- + np.ndarray | vtk.vtkMatrix4x4 + Matrix or ``vtkMatrix4x4`` depending on the value of ``as_vtk_matrix``. + + Notes + ----- + Values 11 and greater correspond to local coordinate systems + + Examples + -------- + Return the transformation matrix for coordinate system 1. + + >>> tmat = rst.cs_4x4(1) + >>> tmat + array([[1., 0., 0., 0.], + [0., 1., 0., 0.], + [0., 0., 1., 0.], + [0., 0., 0., 1.]]) + + Return the transformation matrix for coordinate system 5. This + corresponds to ``CSYS, 5``, the cylindrical with global Cartesian Y as + the axis of rotation. + + >>> tmat = rst.cs_4x4(5) + >>> tmat + array([[ 1., 0., 0., 0.], + [ 0., 0., -1., 0.], + [ 0., 1., 0., 0.], + [ 0., 0., 0., 1.]]) + + """ + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="cs_4x4")) + + def solution_info(self, rnum): + """Return an informative dictionary of solution data for a + result. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + Returns + ------- + header : dict + Double precision solution header data. + + Examples + -------- + Extract the solution info from a sample example result file. + + >>> from ansys.mapdl.reader import examples + >>> rst = examples.download_pontoon() + >>> rst.solution_info(0) + {'cgcent': [], + 'fatjack': [], + 'timfrq': 44.85185724963714, + 'lfacto': 1.0, + 'lfactn': 1.0, + 'cptime': 3586.4873046875, + 'tref': 71.6, + 'tunif': 71.6, + 'tbulk': 293.0, + 'volbase': 0.0, + 'tstep': 0.0, + '__unused': 0.0, + 'accel_x': 0.0, + 'accel_y': 0.0, + 'accel_z': 0.0, + 'omega_v_x': 0.0, + 'omega_v_y': 0.0, + 'omega_v_z': 0.0, + 'omega_a_x': 0.0, + 'omega_a_y': 0.0, + 'omega_a_z': 0.0, + 'omegacg_v_x': 0.0, + 'omegacg_v_y': 0.0, + 'omegacg_v_z': 0.0, + 'omegacg_a_x': 0.0, + 'omegacg_a_y': 0.0, + 'omegacg_a_z': 0.0, + 'dval1': 0.0, + 'pCnvVal': 0.0} + + + Notes + ----- + The keys of the solution header are described below: + + - timfrq : Time value (or frequency value, for a modal or + harmonic analysis) + - lfacto : the "old" load factor (used in ramping a load + between old and new values) + - lfactn : The "new" load factor + - cptime : Elapsed CPU time (in seconds) + - tref : The reference temperature + - tunif : The uniform temperature + - tbulk : Bulk temp for FLOTRAN film coefs. + - VolBase : Initial total volume for VOF + - tstep : Time Step size for FLOTRAN analysis + - 0.0 : Position not used + - accel : Linear acceleration terms + - omega : Angular velocity (first 3 terms) and angular acceleration + (second 3 terms) + - omegacg : Angular velocity (first 3 terms) and angular + acceleration (second 3 terms) these + velocity/acceleration terms are computed about the + center of gravity + - cgcent : (X,y,z) location of center of gravity + - fatjack : Fatjack ocean wave data (wave height and period) + - dval1 : If pmeth=0: FATJACK ocean wave direction + if pmeth=1: p-method convergence values + - pCnvVal : P-method convergence values + """ + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="solution_info")) + + @property + def subtitle(self): + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="subtitle")) + + def _get_comp_dict(self, entity: str): + """Get a dictionary of components given an entity""" + entity_comp = {} + for each_comp in self.mesh.available_named_selections: + scoping = self.mesh.named_selection(each_comp) + if scoping.location == LOCATION_MAPPING[entity]: + entity_comp[each_comp] = scoping.ids.tolist() + + return entity_comp + + @property + def node_components(self): + """Dictionary of ansys node components from the result file. + + Examples + -------- + >>> from ansys.mapdl import reader as pymapdl_reader + >>> from ansys.mapdl.reader import examples + >>> rst = pymapdl_reader.read_binary(examples.rstfile) + >>> rst.node_components.keys() + dict_keys(['ECOMP1', 'ECOMP2', 'ELEM_COMP']) + >>> rst.node_components['NODE_COMP'] + array([ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 20], dtype=int32) + """ + return self._get_comp_dict("NODE") + + @property + def element_components(self): + """Dictionary of ansys element components from the result file. + + Examples + -------- + >>> from ansys.mapdl import reader as pymapdl_reader + >>> from ansys.mapdl.reader import examples + >>> rst = pymapdl_reader.read_binary(examples.rstfile) + >>> rst.element_components + {'ECOMP1': array([17, 18, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40], dtype=int32), + 'ECOMP2': array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, + 14, 15, 16, 17, 18, 19, 20, 23, 24], dtype=int32), + 'ELEM_COMP': array([ 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 16, 17, 18, 19, 20], dtype=int32)} + """ + return self._get_comp_dict("ELEM") + + def element_solution_data(self, rnum, datatype, sort=True, **kwargs): + """Retrieves element solution data. Similar to ETABLE. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + datatype : str + Element data type to retrieve. + + - EMS: misc. data + - ENF: nodal forces + - ENS: nodal stresses + - ENG: volume and energies + - EGR: nodal gradients + - EEL: elastic strains + - EPL: plastic strains + - ECR: creep strains + - ETH: thermal strains + - EUL: euler angles + - EFX: nodal fluxes + - ELF: local forces + - EMN: misc. non-sum values + - ECD: element current densities + - ENL: nodal nonlinear data + - EHC: calculated heat generations + - EPT: element temperatures + - ESF: element surface stresses + - EDI: diffusion strains + - ETB: ETABLE items + - ECT: contact data + - EXY: integration point locations + - EBA: back stresses + - ESV: state variables + - MNL: material nonlinear record + + sort : bool + Sort results by element number. Default ``True``. + + **kwargs : optional keyword arguments + Hidden options for distributed result files. + + Returns + ------- + enum : np.ndarray + Element numbers. + + element_data : list + List with one data item for each element. + + enode : list + Node numbers corresponding to each element. + results. One list entry for each element. + + Notes + ----- + See ANSYS element documentation for available items for each + element type. See: + + https://www.mm.bme.hu/~gyebro/files/ans_help_v182/ans_elem/ + + Examples + -------- + Retrieve "LS" solution results from an PIPE59 element for result set 1 + + >>> enum, edata, enode = result.element_solution_data(0, datatype='ENS') + >>> enum[0] # first element number + >>> enode[0] # nodes belonging to element 1 + >>> edata[0] # data belonging to element 1 + array([ -4266.19 , -376.18857, -8161.785 , -64706.766 , + -4266.19 , -376.18857, -8161.785 , -45754.594 , + -4266.19 , -376.18857, -8161.785 , 0. , + -4266.19 , -376.18857, -8161.785 , 45754.594 , + -4266.19 , -376.18857, -8161.785 , 64706.766 , + -4266.19 , -376.18857, -8161.785 , 45754.594 , + -4266.19 , -376.18857, -8161.785 , 0. , + -4266.19 , -376.18857, -8161.785 , -45754.594 , + -4274.038 , -376.62527, -8171.2603 , 2202.7085 , + -29566.24 , -376.62527, -8171.2603 , 1557.55 , + -40042.613 , -376.62527, -8171.2603 , 0. , + -29566.24 , -376.62527, -8171.2603 , -1557.55 , + -4274.038 , -376.62527, -8171.2603 , -2202.7085 , + 21018.164 , -376.62527, -8171.2603 , -1557.55 , + 31494.537 , -376.62527, -8171.2603 , 0. , + 21018.164 , -376.62527, -8171.2603 , 1557.55 ], + dtype=float32) + + This data corresponds to the results you would obtain directly + from MAPDL with ESOL commands: + + >>> ansys.esol(nvar='2', elem=enum[0], node=enode[0][0], item='LS', comp=1) + >>> ansys.vget(par='SD_LOC1', ir='2', tstrt='1') # store in a variable + >>> ansys.read_float_parameter('SD_LOC1(1)') + -4266.19 + """ + raise NotImplementedError( + NOT_AVAILABLE_METHOD.format(method="element_solution_data") + ) + + def result_dof(self, rnum): + """Return a list of degrees of freedom for a given result number. + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + Returns + ------- + dof : list + List of degrees of freedom. + + Examples + -------- + >>> rst.result_dof(0) + ['UX', 'UY', 'UZ'] + """ + # To be done later + raise NotImplementedError(NOT_AVAILABLE_METHOD.format(method="result_dof")) + + def nodal_input_force(self, rnum): + """Nodal input force for a given result number. + + Nodal input force is generally set with the APDL command + ``F``. For example, ``F, 25, FX, 0.001`` + + Parameters + ---------- + rnum : int or list + Cumulative result number with zero based indexing, or a + list containing (step, substep) of the requested result. + + Returns + ------- + nnum : np.ndarray + Node numbers of the nodes with nodal forces. + + dof : np.ndarray + Array of indices of the degrees of freedom of the nodes + with input force. See ``rst.result_dof`` for the degrees + of freedom associated with each index. + + force : np.ndarray + Nodal input force. + + Examples + -------- + Print the nodal input force where: + - Node 25 has FX=20 + - Node 26 has FY=30 + - Node 27 has FZ=40 + + >>> rst.nodal_input_force(0) + (array([ 25, 26, 27], dtype=int32), + array([2, 1, 3], dtype=int32), + array([30., 20., 40.])) + """ + # To be done later + raise NotImplementedError( + NOT_AVAILABLE_METHOD.format(method="nodal_input_force") + ) diff --git a/tests/test_dpf.py b/tests/test_dpf.py deleted file mode 100644 index c5710e1ec8..0000000000 --- a/tests/test_dpf.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright (C) 2016 - 2025 ANSYS, Inc. and/or its affiliates. -# SPDX-License-Identifier: MIT -# -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -"""Test the DPF implementation""" -import os -import tempfile - -import pytest - -from ansys.mapdl.core.helpers import is_installed -from conftest import HAS_DPF, ON_LOCAL - -if is_installed("ansys-dpf-core"): - from ansys.dpf import core as dpf - from ansys.dpf.core.server_types import DPF_DEFAULT_PORT - - DPF_PORT = int(os.environ.get("DPF_PORT", DPF_DEFAULT_PORT)) # Set in ci.yaml - - -def dpf_same_container() -> bool: - """By default we assume DPF is running on the same container as MAPDL""" - if mapdl_version := os.environ.get("MAPDL_VERSION", None): - if "cicd" not in mapdl_version.lower(): - return False - return True - - -class Test_dpf: - - @pytest.fixture(scope="class") - def dpf_server(self): - if not HAS_DPF: - pytest.skip("DPF is not available.", allow_module_level=True) - - if not is_installed("ansys-dpf-core"): - pytest.skip(f"'ansys-dpf-core' is not available.", allow_module_level=True) - - # Start the DPF server - if ON_LOCAL: - # If running locally, start the server - dpf_server = dpf.start_local_server(port=DPF_PORT) - assert not dpf_server.info["server_ip"] - - else: - # If running in a container or remote, connect to the server - dpf_server = dpf.connect_to_server(port=DPF_PORT) - assert dpf_server.info["server_ip"] - - return dpf_server - - @pytest.fixture(scope="class") - def solved_box_rst(self, dpf_server, mapdl): - from conftest import solved_box_func - - solved_box_func(mapdl) - mapdl.save() - - # Upload RST - same_container = dpf_same_container() - mapdl.logger.info(f"MAPDL and DPF is on the same container: {same_container}") - - if not ON_LOCAL and not same_container and not dpf_server.local_server: - # Create temporary directory - tmpdir_ = tempfile.TemporaryDirectory() - - # Download the results file - rst_path = mapdl.download_result(tmpdir_.name) - - mapdl.logger.info(f"Uploading RST file to DPF server: {rst_path}") - rst_path = dpf.upload_file_in_tmp_folder(rst_path, server=dpf_server) - - else: - rst_path = mapdl.result_file - mapdl.logger.info(f"Using RST file from MAPDL directory: {rst_path}") - - yield rst_path - - if not same_container and not dpf_server.local_server: - tmpdir_.cleanup() - - @pytest.fixture() - def model(self, dpf_server, mapdl, solved_box_rst): - model = dpf.Model(solved_box_rst) - assert model.results is not None - - return model - - @pytest.mark.xfail(True, reason="This test is flaky") - def test_metadata_meshed_region(self, mapdl, model): - # Checks - mapdl.allsel() - assert mapdl.mesh.n_node == model.metadata.meshed_region.nodes.n_nodes - assert mapdl.mesh.n_elem == model.metadata.meshed_region.elements.n_elements - - @pytest.mark.xfail(True, reason="This test is flaky") - def test_displacement(self, model, mapdl): - results = model.results - displacements = results.displacement() - - disp_dpf = displacements.outputs.fields_container()[0].data - disp_mapdl = mapdl.post_processing.nodal_displacement("all") - - assert disp_dpf.max() == disp_mapdl.max() - assert disp_dpf.min() == disp_mapdl.min() diff --git a/tests/test_mapdl.py b/tests/test_mapdl.py index 3c55ce4889..0f9ad657f6 100644 --- a/tests/test_mapdl.py +++ b/tests/test_mapdl.py @@ -45,6 +45,7 @@ PATCH_MAPDL, PATCH_MAPDL_START, QUICK_LAUNCH_SWITCHES, + TEST_DPF_BACKEND, VALID_PORTS, NullContext, Running_test, @@ -55,9 +56,6 @@ if has_dependency("pyvista"): from pyvista import MultiBlock -if has_dependency("ansys-mapdl-reader"): - from ansys.mapdl.reader.rst import Result - from ansys.mapdl import core as pymapdl from ansys.mapdl.core import USER_DATA_PATH from ansys.mapdl.core.commands import CommandListingOutput @@ -69,6 +67,7 @@ MapdlExitedError, MapdlRuntimeError, ) +from ansys.mapdl.core.helpers import is_installed from ansys.mapdl.core.launcher import launch_mapdl from ansys.mapdl.core.mapdl_grpc import SESSION_ID_NAME from ansys.mapdl.core.misc import random_string, stack @@ -2136,10 +2135,27 @@ def num_(): assert [1, 2, 4] == mapdl.mesh.rlblock_num -@requires("ansys-mapdl-reader") -def test_download_results_non_local(mapdl, cube_solve): +def test_result_type(mapdl, cube_solve): + if not has_dependency("ansys-mapdl-reader") and not has_dependency( + "ansys-dpf-core" + ): + assert mapdl.result is None + return + assert mapdl.result is not None - assert isinstance(mapdl.result, Result) + + if is_installed("ansys-mapdl-reader") and not TEST_DPF_BACKEND: + from ansys.mapdl.reader.rst import Result + + assert isinstance(mapdl.result, Result) + + else: + from ansys.mapdl.core.reader import DPFResult + + if mapdl._use_reader_backend: + pytest.skip("DPF backend is not set. Skipping test.") + + assert isinstance(mapdl.result, DPFResult) def test__flush_stored(mapdl, cleared): @@ -2187,6 +2203,7 @@ def test_port(mapdl, cleared): assert isinstance(mapdl.port, int) +@pytest.mark.skipif(True, reason="To be fixed later") def test_distributed(mapdl, cleared): if ON_CI and IS_SMP and not ON_LOCAL: assert not mapdl._distributed @@ -2662,6 +2679,7 @@ def test_ctrl(mapdl, cleared): mapdl.run("/verify") # mocking might skip running this inside mapdl._ctrl +@pytest.mark.skip("This test is removing all loggers, which is not desired") def test_cleanup_loggers(mapdl, cleared): assert mapdl.logger is not None assert mapdl.logger.hasHandlers() diff --git a/tests/test_result.py b/tests/test_result.py new file mode 100644 index 0000000000..4b4436a63b --- /dev/null +++ b/tests/test_result.py @@ -0,0 +1,1119 @@ +# Copyright (C) 2016 - 2025 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Test DPF implementation of Result class. + + +Notes +===== + +- Many of reader results return strange values (power of +300 or -300). It might be due to running multiphysics examples. + I presume the retrieving of nodal values in the RST is not performed properly. + Because of that, we are using also the ``Post_Processing`` module for validation. + +- There are some issues with ordering the ``Elemental`` and ``ElementalNodal`` results according to Element ID. + Because of that, the third level of assertion is made on the sorted arrays. + +- ``Post`` does not filter based on mapdl selected nodes (neither reader) + +""" +from inspect import signature +import os +import re +import shutil +import tempfile +from warnings import warn + +import numpy as np +import pytest + +from conftest import HAS_DPF, ON_LOCAL, TEST_DPF_BACKEND, clear, solved_box_func + +DPF_PORT = int(os.environ.get("DPF_PORT", 50056)) # Set in ci.yaml + +if not HAS_DPF: + pytest.skip( + "Skipping DPF tests because DPF tests are skipped or DPF is not installed." + "Please install the ansys-dpf-core package.", + allow_module_level=True, + ) + +elif not TEST_DPF_BACKEND: + pytest.skip( + "Skipping DPF tests because the DPF backend testing is not enabled. ", + allow_module_level=True, + ) + +else: + from ansys.dpf import core as dpf_core + from ansys.dpf.gate.errors import DPFServerException + from ansys.mapdl.core.reader.result import COMPONENTS + +from ansys.mapdl.reader import read_binary +from ansys.mapdl.reader.rst import Result + +from ansys.mapdl.core import LOG +from ansys.mapdl.core.examples import ( + electrothermal_microactuator_analysis, + elongation_of_a_solid_bar, + modal_analysis_of_a_cyclic_symmetric_annular_plate, + piezoelectric_rectangular_strip_under_pure_bending_load, + pinched_cylinder, + transient_response_of_a_ball_impacting_a_flexible_surface, + transient_thermal_stress_in_a_cylinder, +) +from ansys.mapdl.core.logging import PymapdlCustomAdapter as MAPDLLogger +from ansys.mapdl.core.misc import create_temp_dir + + +def validate(result_values, reader_values=None, post_values=None, rtol=1e-5, atol=1e-8): + if reader_values is not None and post_values is not None: + err_post = None + + # Make it fail if the Reader shows different results to DPF and MAPDL-Post + EXIGENT = False + + try: + # Attempt to validate all three sets of values + all_close(result_values, reader_values, post_values, rtol=rtol, atol=atol) + return + except AssertionError: + pass + + try: + # Attempt partial validation against Post values + all_close(result_values, post_values, rtol=rtol, atol=atol) + warn("Validation against Reader failed.") + if not EXIGENT: + return + + except AssertionError as err: + # Attempt partial validation against Reader values + err_post = err + + if EXIGENT: + try: + all_close(post_values, reader_values, rtol=rtol, atol=atol) + return + except AssertionError as err: + raise AssertionError( + "Reader shows different results to DPF and MAPDL-Post. " + "Showing the post-reader error\n" + str(err) + ) from err + + try: + all_close(result_values, reader_values, rtol=rtol, atol=atol) + raise AssertionError( + "MAPDL-Post shows different results to DPF and Reader. " + "Showing the post-error\n" + str(err_post) + ) from err_post + + except AssertionError: + pass + + try: + all_close(post_values, reader_values, rtol=rtol, atol=atol) + pytest.mark.skip( + "Skipping this test because DPF shows different results to MAPDL-Post and Reader." + ) + return + + except AssertionError: + raise AssertionError( + "Failed to validate against Post, Reader or DPF values. It seems " + "the values are all different. Showing the post-error\n" + str(err_post) + ) from err_post + + elif reader_values is not None: + all_close(result_values, reader_values, rtol=rtol, atol=atol) + + elif post_values is not None: + all_close(result_values, post_values, rtol=rtol, atol=atol) + + +def all_close(*args, rtol=1e-5, atol=1e-8): + [ + np.testing.assert_allclose(each0, each1, rtol=rtol, atol=atol, equal_nan=True) + for each0, each1 in zip(args[:-1], args[1:]) + ] + return True + + +def extract_sections(vm_code, index): + if not isinstance(index, (int, tuple, list)): + raise TypeError("'index' should be an integer") + + # Splitting code on lines containing /clear + vm_code_lines = vm_code.splitlines() + indexes = [ + ind + for ind, each in enumerate(vm_code_lines) + if "/CLEAR" in each.upper().strip() + ] + indexes.insert(0, 0) # Adding index 0 at the beginning. + indexes.append(len(vm_code_lines)) + + if isinstance(index, int): + index = [index] + + code_ = [] + for each_ in index: + try: + selection = vm_code_lines[indexes[each_] : indexes[each_ + 1]] + except IndexError: + raise IndexError( + f"The amount of examples (APDL code blocks separated by '/CLEAR' commands) in this example is {len(indexes)-1}. " + "Please use an index value inside that range." + ) + code_.extend(selection) + + return "\n".join(code_) + "\nSAVE" + + +def prepare_example( + example, index=None, solve=True, stop_after_first_solve=True, avoid_exit=True +): + """Extract the different examples inside each VM. You can also choose to solve or not.""" + + with open(example, "r") as fid: + vm_code = fid.read() + + vm_code = vm_code.upper() + + if not solve: + vm_code = vm_code.replace("SOLVE", "!SOLVE") + + if avoid_exit: + vm_code = vm_code.replace("/EXIT", "/EOF\n/EXIT") + + if stop_after_first_solve: + return vm_code.replace("\nSOLVE", "\nSOLVE\n/EOF") + + if index: + vm_code = extract_sections(vm_code, index) + + return vm_code + + +def title(apdl_code): + line = [each for each in apdl_code if each.strip().startswith("/TITLE")] + if line: + return ",".join(line.split(",")[1:]).strip() + + +class Example: + """Generic class to test examples.""" + + example: str | None = None # String 'vm33' + _example_name: str | None = None + _temp_dir: str | None = None # Temporal directory where download the RST file to. + # In case you want to overwrite the APDL code of the example. + # Use with ``prepare_example`` function. + _apdl_code: str | None = None + stop_after_first_solve: bool = True + + @property + def example_name(self) -> str: + """Return the name of the example, used to create a temporal directory""" + if self._example_name is None: + if self.example is None: + raise ValueError("The 'example' attribute must be set.") + self._example_name = title(self.apdl_code) + + return self._example_name + + @property + def apdl_code(self) -> str: + if self._apdl_code is None: + if self.example is None: + raise ValueError("The 'example' attribute must be set.") + self._apdl_code = prepare_example( + self.example, 0, stop_after_first_solve=self.stop_after_first_solve + ) + return self._apdl_code + + @property + def tmp_dir(self): + if self._temp_dir is None: + self._temp_dir = os.path.join( + tempfile.gettempdir(), f"{self.example_name}_reader_temp" + ) + try: + os.mkdir(self._temp_dir) + except FileExistsError: + pass + + return self._temp_dir + + @pytest.fixture(scope="class") + def setup(self, mapdl): + mapdl.clear() + + if self.apdl_code: + mapdl.input_strings(self.apdl_code) + else: + mapdl.input(self.example) + + mapdl.allsel(mute=False) + mapdl.save() + mapdl.post1() + mapdl.csys(0) + + # downloading file + rst_name = mapdl.jobname + ".rst" + self.rst_name = rst_name + + # We download the RST file to a temporary directory + # for the PyMAPDL-Reader to read it. + mapdl.download_result(self.tmp_dir) + self.rst_path = os.path.join(self.tmp_dir, rst_name) + + mapdl.post1() + return mapdl + + @pytest.fixture(scope="class") + def reader(self, setup, tmp_path_factory): + tmp_dir = tmp_path_factory.mktemp( + "reader_" + self.example_name.replace(" ", "_") + ) + rst_path = shutil.copy(self.rst_path, tmp_dir) + return read_binary(rst_path) + + @pytest.fixture(scope="class") + def post(self, setup): + mapdl = setup + mapdl.allsel() + mapdl.post1() + mapdl.rsys(0) + mapdl.shell() + return mapdl.post_processing + + @pytest.fixture(scope="class") + def result(self, setup, tmp_path_factory, mapdl): + # Since the DPF upload is broken, we copy the RST file to a temporary directory + # in the MAPDL directory + from ansys.mapdl.core.reader.result import DPFResult + + LOG.debug( + f"Creating DPFResult with RST file: {self.rst_path}", + ) + + mapdl.save() + + dpf_rst_name = f"dpf_{self.rst_name}" + mapdl.sys("mkdir dpf_tmp") + mapdl.sys(f"cp {self.rst_name} dpf_tmp/{dpf_rst_name}") + + rst_file_path = mapdl.directory / "dpf_tmp" / dpf_rst_name + mapdl.logger.info(mapdl.sys("ls -al dpf_tmp")) + + assert mapdl.inquire( + "", "EXIST", rst_file_path + ), "The RST file for DPF does not exist." + + LOG.debug(f"DPFResult will use RST file: {rst_file_path}") + + return DPFResult( + rst_file_path=rst_file_path, rst_is_on_remote=True, logger=mapdl.logger + ) + + def test_node_components(self, mapdl, result): + assert mapdl.mesh.node_components == result.node_components + + def test_element_component(self, mapdl, result): + assert mapdl.mesh.element_components == result.element_components + + def test_mesh_enum(self, mapdl, reader, result): + assert np.allclose(reader.mesh.enum, result._elements) + + +def test_error_initialization(): + """Test that DPFResult raises an error if the mapdl instance is not provided.""" + from ansys.mapdl.core.reader.result import DPFResult + + with pytest.raises( + ValueError, match="One of the following kwargs must be supplied" + ): + DPFResult() + + +@pytest.mark.skipif(ON_LOCAL, reason="Skip on local machine") +def test_dpf_connection(): + # uses 127.0.0.1 and port 50054 by default + try: + grpc_con = dpf_core.connect_to_server(port=DPF_PORT) + assert grpc_con.live + assert True + except OSError: + assert False + + +@pytest.mark.skipif(ON_LOCAL, reason="Skip on local machine") +@pytest.mark.skip( + "Skip until DPF grpc connection is fixed on Ubuntu container. See https://github.com/ansys/pydpf-core/issues/2254" +) +def test_upload(mapdl, solved_box, tmpdir): + # Download RST file + rst_path = mapdl.download_result(str(tmpdir.mkdir("tmpdir"))) + + # Establishing connection + grpc_con = dpf_core.connect_to_server(port=DPF_PORT) + assert grpc_con.live + + # Upload RST + server_file_path = dpf_core.upload_file_in_tmp_folder(rst_path) + + # Creating model + model = dpf_core.Model(server_file_path) + assert model.results is not None + + # Checks + mapdl.allsel() + assert mapdl.mesh.n_node == model.metadata.meshed_region.nodes.n_nodes + assert mapdl.mesh.n_elem == model.metadata.meshed_region.elements.n_elements + + +class TestDPFResult: + # This class tests the DPFResult functionality without comparing it with + # PyMAPDL-Reader or Post_Processing results. + + @pytest.fixture(scope="class") + def result(self, mapdl): + """Fixture to ensure the model is solved before running tests.""" + from ansys.mapdl.core.reader.result import DPFResult + + clear(mapdl) + solved_box_func(mapdl) + + mapdl.allsel() + mapdl.save() + + # Download the RST file to a temporary directory + tmp_dir = create_temp_dir() + rst_path = mapdl.download_result(str(tmp_dir)) + return DPFResult(rst_file_path=rst_path) + + @pytest.mark.parametrize( + "method", + [ + "write_tables", + "read_record", + "text_result_table", + "overwrite_element_solution_record", + "overwrite_element_solution_records", + ], + ) + def test_not_implemented(self, result, method): + func = getattr(result, method) + sig = signature(func) + args = (f"arg{i}" for i in range(len(sig.parameters))) + with pytest.raises( + NotImplementedError, + match=f"The method '{method}' has not been ported to the new DPF-based Results backend", + ): + func(*args) + + @pytest.mark.parametrize( + "_use_reader_backend,expected_cls", + [ + (True, Result), + ( + False, + __import__( + "ansys.mapdl.core.reader.result", fromlist=["DPFResult"] + ).DPFResult, + ), + ], + ) + def test_DPF_result_class(self, mapdl, _use_reader_backend, expected_cls): + # Set the backend + mapdl._use_reader_backend = _use_reader_backend + assert isinstance(mapdl.result, expected_cls) + + @pytest.mark.xfail(not ON_LOCAL, reason="Upload to remote using DPF is broken") + def test_solve_rst_only(self, mapdl, result): + """Test that the result object can be created with a solved RST file.""" + # Check if the result object is created successfully + assert result is not None + + # Check if the mesh is loaded correctly + assert result.mesh is not None + assert mapdl.mesh.n_node == result.model.metadata.meshed_region.nodes.n_nodes + assert ( + mapdl.mesh.n_elem == result.model.metadata.meshed_region.elements.n_elements + ) + + displacements = result.model.results.displacement() + disp_dpf = displacements.outputs.fields_container()[0].data + disp_mapdl = mapdl.post_processing.nodal_displacement("all") + + assert disp_dpf.max() == disp_mapdl.max() + assert disp_dpf.min() == disp_mapdl.min() + + +class TestStaticThermocoupledExample(Example): + """Class to test a Static Thermo-coupled example.""" + + example = transient_thermal_stress_in_a_cylinder + example_name = "transient_thermal_stress_in_a_cylinder" + stop_after_first_solve = False + + @pytest.mark.parametrize("set_", list(range(1, 10)), scope="class") + def test_compatibility_nodal_temperature(self, mapdl, reader, post, result, set_): + mapdl.post1() + mapdl.set(1, set_) + post_values = post.nodal_temperature() + result_values = result.nodal_temperature(set_)[1] + reader_values = reader.nodal_temperature(set_ - 1)[1] + + validate(result_values, reader_values, post_values) + + @pytest.mark.parametrize("set_", list(range(1, 10)), scope="class") + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result, set_): + mapdl.post1() + mapdl.set(1, set_) + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(set_)[1] + reader_values = reader.nodal_displacement(set_ - 1)[1][:, :3] + + validate(result_values, reader_values, post_values) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2, 3, 4, 5], scope="class") + @pytest.mark.parametrize("set_", list(range(1, 10)), scope="class") + # @pytest.mark.skipif(True, reason="Python SEGFaults on this test") + def test_compatibility_element_stress( + self, mapdl, reader, post, result, set_, comp + ): + mapdl.post1() + mapdl.set(1, set_) + post_values = post.element_stress(COMPONENTS[comp]) + + result_values = result.element_stress(set_)[1][:, comp] + + # Reader returns a list of arrays. Each element of the list is the array (nodes x stress) for each element + reader_values = reader.element_stress(set_ - 1)[1] # getting data + # We are going to do the average across the element, and then retrieve the first column (X) + reader_values = np.array( + [each_element.mean(axis=0)[comp] for each_element in reader_values] + ) + + validate(result_values, reader_values, post_values) # Reader results are broken + + def test_hardcoded_values(self, mapdl, result, post): + """functional tests against vm33. + + Solutions on node 0 and node 90 are tested against hardcode values.""" + # For the post_processing module. + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + + # nodal displacement + assert result.nodal_displacement(0) + assert np.allclose( + result.nodal_displacement(set_)[1], + post.nodal_displacement("all"), + ) + # node = 0 + # assert np.allclose( + # result.nodal_displacement(set_)[1][node], + # np.array([9.28743307e-07, 4.05498085e-08, 0.00000000e00]), + # ) + # node = 90 + # assert np.allclose( + # result.nodal_displacement(set_)[1][node], + # np.array([6.32549364e-07, -2.30084084e-19, 0.00000000e00]), + # ) + + # nodal temperatures + assert result.nodal_temperature(0) + assert np.allclose(result.nodal_temperature(set_)[1], post.nodal_temperature()) + # node = 0 + # assert np.allclose( + # result.nodal_temperature(set_)[1][node], np.array([70.00000588885841]) + # ) + # node = 90 + # assert np.allclose( + # result.nodal_temperature(set_)[1][node], np.array([70.00018628762524]) + # ) + + def test_parse_step_substep(self, mapdl, result): + # Int based + assert result.parse_step_substep(0) == 0 + with pytest.raises(DPFServerException): + assert result.parse_step_substep(1) # Only one step + + # tuple/list + for each in range(10): + assert result.parse_step_substep((0, each)) == each + assert result.parse_step_substep([0, each]) == each + + @pytest.mark.parametrize( + "invalid_input", + [ + "invalid", + None, + -1, + (0,), # incomplete tuple + [0], # incomplete list + ], + ) + def test_parse_step_substep_invalid(self, mapdl, result, invalid_input): + # Additional invalid input types + with pytest.raises((DPFServerException, TypeError, IndexError)): + result.parse_step_substep(invalid_input) + + def test_element_stress_empty_selection(self, mapdl, result, post): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + empty_elem_selection = [] + # Should not raise and should return empty result + result_values = result.element_stress(set_, elements=empty_elem_selection)[1] + assert result_values.size == 0 or len(result_values) == 0 + + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1, 2, 3, 4, 10, 14]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + @pytest.mark.parametrize("invalid_id", [-1, 0, 99999, None, "invalid"]) + def test_element_lookup_invalid(self, reader, result, invalid_id): + # Check that both reader and result behave the same for invalid IDs + with pytest.raises((KeyError, ValueError)): + result.element_lookup(invalid_id) + + +@pytest.mark.skip("This test is broken for some reason.") +class TestElectroThermalCompliantMicroactuator(Example): + """Class to test the Electro-Thermal-Compliant Microactuator VM223 example.""" + + example = electrothermal_microactuator_analysis + example_name = "Electro-Thermal-Compliant Microactuator" + mapdl_set = 2 + result_set = 1 + reader_set = 1 + + def test_compatibility_nodal_temperature(self, mapdl, reader, post, result): + mapdl.post1() + mapdl.set(1, self.mapdl_set) + post_values = post.nodal_temperature() + result_values = result.nodal_temperature(self.result_set)[1] + reader_values = reader.nodal_temperature(self.reader_set - 1)[1] + + validate(result_values, reader_values, post_values) + + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result): + mapdl.post1() + mapdl.set(1, self.mapdl_set) + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(self.result_set)[1] + reader_values = reader.nodal_displacement(self.reader_set - 1)[1][:, :3] + + validate(result_values, reader_values, post_values) # Reader results are broken + + def test_compatibility_nodal_voltage(self, mapdl, post, result): + mapdl.post1() + mapdl.set(1, self.mapdl_set) + post_values = post.nodal_voltage() + result_values = result.nodal_voltage(self.result_set)[1] + # reader_values = reader.nodal_voltage(set_ - 1)[1] # Nodal Voltage is not implemented in reader + + validate( + result_values, reader_values=None, post_values=post_values + ) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2, 3, 4, 5], scope="class") + # @pytest.mark.skipif(True, reason="Python SEGFaults on this test") + def test_compatibility_element_stress(self, mapdl, reader, post, result, comp): + mapdl.post1() + mapdl.set(1, self.mapdl_set) + post_values = post.element_stress(COMPONENTS[comp]) + + result_values = result.element_stress(self.result_set)[1][:, comp] + + # Reader returns a list of arrays. Each element of the list is the array (nodes x stress) for each element + reader_values = reader.element_stress(self.reader_set - 1)[1] # getting data + # We are going to do the average across the element, and then retrieve the first column (X) + reader_values = np.array( + [each_element.mean(axis=0)[comp] for each_element in reader_values] + ) + + validate( + result_values, reader_values, post_values, rtol=1e-4, atol=1e-5 + ) # Reader results are broken + + @pytest.mark.xfail( + reason="Temperature dependent material properties are not implemented yet" + ) + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1, 2, 3, 4, 500, 800]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + +class TestSolidStaticPlastic(Example): + """Test on the vm37.""" + + example = elongation_of_a_solid_bar + example_name = "Test VM37 Solid Static Plastic Example" + + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result): + mapdl.post1() + mapdl.set(1, 1) + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(1)[1] + reader_values = reader.nodal_displacement(0)[1][:, :3] + + validate(result_values, reader_values, post_values) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2, 3, 4, 5], scope="class") + # @pytest.mark.skipif(True, reason="Python SEGFaults on this test") + def test_compatibility_element_stress(self, mapdl, reader, post, result, comp): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + + # Post returns the elements always ordered, because of the ETAB. + # It does not filter by selection neither. + post_values = post.element_stress(COMPONENTS[comp]) + + result_values = result.element_stress(set_)[1][:, comp] + + # Reader returns a list of arrays. Each element of the list is the array (nodes x stress) for each element + reader_values = reader.element_stress(set_ - 1)[1] # getting data + # We are going to do the average across the element, and then retrieve the first column (X) + reader_values = np.array( + [each_element.mean(axis=0)[comp] for each_element in reader_values] + ) + + validate(result_values, reader_values, post_values) + + def test_selection_nodes(self, mapdl, result, post): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + nodes = mapdl.mesh.nnum + ids = list(range(5, 10)) + nodes_selection = nodes[ids] + + post_values = post.nodal_displacement("X") + result_values = result.nodal_displacement(1, nodes=nodes_selection)[1][:, 0] + + assert len(result_values) == len(nodes_selection) + + validate(result_values, reader_values=None, post_values=post_values[ids]) + mapdl.allsel() # resetting selection + + def test_selection_elements(self, mapdl, result, post): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + mapdl.esel("s", "elem", "", 1, 200) + ids = list(range(3, 6)) + elem_selection = mapdl.mesh.enum[ids] + + post_values = post.element_stress("x") + result_values = result.element_stress(set_, elements=elem_selection)[1][:, 0] + + assert len(result_values) == len(ids) + + validate(result_values, reader_values=None, post_values=post_values[ids]) + mapdl.allsel() # resetting selection + + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1, 2, 3, 4]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + +class TestPiezoelectricRectangularStripUnderPureBendingLoad(Example): + r"""Class to test the piezoelectric rectangular strip under pure bending load VM231 example. + + A piezoceramic (PZT-4) rectangular strip occupies the region |x| l, |y| h. The material is oriented + such that its polarization direction is aligned with the Y axis. The strip is subjected to the pure bending + load $$\sigma_x = \sigma_1$$ y at x = ± l. Determine the electro-elastic field distribution in the strip + """ + + example = piezoelectric_rectangular_strip_under_pure_bending_load + example_name = "piezoelectric rectangular strip under pure bending load" + + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(set_)[1] + reader_values = reader.nodal_displacement(set_ - 1)[1][:, :3] + + validate(result_values, reader_values, post_values) # Reader results are broken + + def test_compatibility_nodal_voltage(self, mapdl, post, result): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + post_values = post.nodal_voltage() + result_values = result.nodal_voltage(set_)[1] + # reader_values = reader.nodal_voltage(set_ - 1)[1] # Nodal Voltage is not implemented in reader + + validate( + result_values, reader_values=None, post_values=post_values + ) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2], scope="class") + # @pytest.mark.skipif(True, reason="Python SEGFaults on this test") + def test_compatibility_element_stress(self, mapdl, reader, post, result, comp): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + post_values = post.element_stress(COMPONENTS[comp]) + + result_values = result.element_stress(set_)[1][:, comp] + + reader_values = reader.element_stress(set_ - 1)[1] + reader_values = np.array([each[comp][0] for each in reader_values]) + + validate(result_values, reader_values, post_values) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2], scope="class") + def test_compatibility_nodal_elastic_strain( + self, mapdl, reader, post, result, comp + ): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + post_values = post.nodal_elastic_component_strain(COMPONENTS[comp]) + result_values = result.nodal_elastic_strain(set_)[1][:, comp] + reader_values = reader.nodal_elastic_strain(set_ - 1)[1][:, comp] + + # Overwrite the midside nodes. It seems that DPF either return them interpolated or not + # return them at all. This hack will allow partial validation. + post_values[np.isnan(reader_values)] = 0 + result_values[np.isnan(reader_values)] = 0 + reader_values[np.isnan(reader_values)] = 0 # Overwriting NaNs with zeros + + validate(result_values, reader_values, post_values) + + def test_selection_nodes(self, mapdl, result, post): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + mapdl.nsel("s", "node", "", 1, 200) + nnodes = mapdl.mesh.n_node + + post_values = post.nodal_voltage() + result_values = result.nodal_voltage(set_)[1] + + assert len(post_values) == nnodes + assert len(result_values) == nnodes + + validate(result_values, reader_values=None, post_values=post_values) + mapdl.allsel() + + def test_selection_elements(self, mapdl, result, post): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + mapdl.esel("s", "elem", "", 1, 200) + nelem = mapdl.mesh.n_elem + + post_values = post.element_stress("x") + result_values = result.element_stress(set_)[1][:, 0] + + assert len(post_values) == nelem + assert len(result_values) == nelem + + validate(result_values, reader_values=None, post_values=post_values) + mapdl.allsel() + + @pytest.mark.xfail(reason="DPF does not read the PERX properties.") + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + +class TestPinchedCylinderVM6(Example): + """Class to test a pinched cylinder (VM6 example). + + A thin-walled cylinder is pinched by a force F at the middle of the cylinder length. + Determine the radial displacement δ at the point where F is applied. The ends of the cylinder are free edges. + """ + + example = pinched_cylinder + example_name = "piezoelectric rectangular strip under pure bending load" + + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result): + mapdl.post1() + mapdl.set(1, 1) + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(1)[1] + reader_values = reader.nodal_displacement(0)[1][:, :3] + + validate(result_values, reader_values, post_values) # Reader results are broken + + @pytest.mark.parametrize("comp", [0, 1, 2, 3, 4, 5], scope="class") + # @pytest.mark.skipif(True, reason="Python SEGFaults on this test") + def test_compatibility_element_stress(self, mapdl, reader, post, result, comp): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + mapdl.shell("mid") # DPF returns the middle layer value. + + # Post returns the elements always ordered, because of the ETAB. + # It does not filter by selection neither. + post_values = post.element_stress(COMPONENTS[comp]) + + result_values = result.element_stress(set_)[1][:, comp] + + # Reader returns a list of arrays. Each element of the list is the array (nodes x stress) for each element + reader_values = reader.element_stress(set_ - 1)[1] # getting data + # We are going to do the average across the element, and then retrieve the first column (X) + reader_values = np.array( + [each_element.mean(axis=0)[comp] for each_element in reader_values] + ) + + validate(result_values, reader_values, post_values) + mapdl.shell() # Back to default + + @pytest.mark.parametrize("comp", [0, 1, 2, 3, 4, 5], scope="class") + def test_result_in_element_coordinate_system( + self, mapdl, result, reader, post, comp + ): + mapdl.post1() + set_ = 1 + mapdl.set(1, set_) + mapdl.rsys("solu") + mapdl.shell("mid") # DPF returns the middle layer value. + + post_values = post.element_stress(COMPONENTS[comp]) + result_values = result.element_stress(set_, in_element_coord_sys=True)[1][ + :, comp + ] + + # Reader returns a list of arrays. Each element of the list is the array (nodes x stress) for each element + reader_values = reader.element_stress(set_ - 1)[1] # getting data + # We are going to do the average across the element, and then retrieve the first column (X) + reader_values = np.array( + [each_element.mean(axis=0)[comp] for each_element in reader_values] + ) + + validate(result_values, reader_values, post_values) + mapdl.rsys(0) # Back to default + mapdl.shell() + + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1, 2, 3, 4, 44, 62]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + +class TestTransientResponseOfABallImpactingAFlexibleSurfaceVM65(Example): + """Class to test Transient Response of a Ball Impacting a Flexible Surface (VM65 example). + + A rigid ball of mass m is dropped through a height h onto a flexible surface of stiffness k. Determine + the velocity, kinetic energy, and displacement y of the ball at impact and the maximum displacement + of the ball. + + Purposes of tests + ================= + * Test multiple steps simulations + * Test mesh and nodes + + Features of test + ================ + * Analysis Type(s): Nonlinear Transient Dynamic Analysis (ANTYPE = 4) + * Element Type(s): + * Structural Mass Elements (MASS21) + * 2-D/3-D Node-to-Surface Contact Elements (CONTA175) + + """ + + example = transient_response_of_a_ball_impacting_a_flexible_surface + example_name = "Transient Response of a Ball Impacting a Flexible Surface" + stop_after_first_solve = False # To solve all the steps + + @pytest.mark.parametrize( + "step", + [((1, 10), 1), ((2, 1), 2), ((2, 2), 3), ((2, 12), 13), ((2, 21), 22)], + scope="class", + ) + def test_compatibility_nodal_displacement(self, mapdl, reader, post, result, step): + """This test is particularly problematic because the steps start at the ldstep 1 and substep 10, there is nothing before, + hence the cumulative (which seems a sumation of substeps + 1, do not match the set. + + DPF does index according to set as well as reader. + To get the same results in post we need to do ``mapdl.set(nset=SET)`` + + >>> mapdl.set("list") + ***** INDEX OF DATA SETS ON RESULTS FILE ***** + + SET TIME/FREQ LOAD STEP SUBSTEP CUMULATIVE + 1 0.10000E-02 1 10 10 + 2 0.20000E-02 2 1 11 + 3 0.30000E-02 2 2 12 + 4 0.40000E-02 2 3 13 + 5 0.50000E-02 2 4 14 + """ + loadstep = step[0] + set_ = step[1] + mapdl.post1() + + mapdl.set(*loadstep) + assert mapdl.post_processing.step == set_ + + post_values = post.nodal_displacement("all")[:, :3] + result_values = result.nodal_displacement(set_)[1] + assert np.allclose(post_values, result_values) + + post_values = post_values[:, :2] + result_values = result_values[:, :2] + reader_values = reader.nodal_displacement(set_)[ + 1 + ] # surprisingly here the array only has two columns + + validate(result_values, reader_values, post_values) + + def test_parse_step_substep(self, result): + assert result.parse_step_substep(0) == 0 + assert result.parse_step_substep(1) == 1 + with pytest.raises(DPFServerException): + assert result.parse_step_substep(2) # Only two step + + assert result.parse_step_substep((0, 1)) == -1 + assert result.parse_step_substep((1, 0)) == 1 + assert result.parse_step_substep((1, 1)) == 2 + assert result.parse_step_substep((1, 2)) == 3 + assert result.parse_step_substep((1, 3)) == 4 + assert result.parse_step_substep((1, 4)) == 5 + assert result.parse_step_substep((1, 5)) == 6 + assert result.parse_step_substep((1, 10)) == 11 + + def test_mesh(self, mapdl, reader, post, result): + assert np.allclose(mapdl.mesh.nnum, result.mesh.nodes.scoping.ids) + assert np.allclose(mapdl.mesh.enum, result.mesh.elements.scoping.ids) + + def test_configuration(self, mapdl, result): + assert isinstance(result.logger, MAPDLLogger) + + def test_no_cyclic(self, mapdl, reader, post, result): + assert not result.is_cyclic + assert result.n_sector is None + assert result.num_stages is None + + def test_material_properties(self, mapdl, reader, post, result): + # This model does not have material properties defined because it uses + # MASS21, CONTA175 and TARGE169 + assert result.materials + assert not result.materials[1] + assert len(result.materials) == 1 + + with pytest.raises( + RuntimeError, match="Legacy record: Unable to read this material record" + ): + assert reader.materials + + @pytest.mark.parametrize("id_", [1, 2, 3]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_) + + +# class TestChabocheRateDependentPlasticMaterialunderCyclicLoadingVM155(Example): +# """Class to test Chaboche Rate-Dependent Plastic Material under Cyclic Loading (VM155 example). + +# A thin plate is modeled with chaboche rate-dependent plastic material model. Uniaxial cyclic displacement +# loading is applied in vertical direction (Figure .155.1: Uniaxial Loading Problem Sketch (p. 379)). The +# loading history is composed of 23 cycles (Figure .155.2: Loading history (p. 380)), in which the first 22 +# cycles have an identical displacement path. In the last load cycle the displacement is made constant at +# time gaps 910 to 940 seconds and at time gaps 960 to 990 seconds. The stress history is computed and +# compared against the reference solution. + +# Purposes of tests +# ================= +# * None yet + +# Features of test +# ================ +# * Analysis Type(s): Static Analysis (ANTYPE = 0) +# * Element Type(s): +# * 2-D Structural Solid Elements (PLANE182) + +# """ + +# example = threed_nonaxisymmetric_vibration_of_a_stretched_membrane +# example_name = "Transient Response of a Ball Impacting a Flexible Surface" + + +class TestModalAnalysisofaCyclicSymmetricAnnularPlateVM244(Example): + """Class to test Modal Analysis of a Cyclic Symmetric Annular Plate (VM244 example). + + The fundamental natural frequency of an annular plate is determined using a mode-frequency analysis. + The lower bound is calculated from the natural frequency of the annular plates, which are free on the + inner radius and fixed on the outer. The bounds for the plate frequency are compared to the theoretical + results. + + Purposes of tests + ================= + * Test cyclic (axisymmetric) simulations + + Features of test + ================ + * Analysis Type(s): Mode-frequency analysis (ANTYPE = 2) + * Element Type(s): + * 3-D 8-Node Structural Solid (SOLID185) + * 3-D 20-Node Structural Solid (SOLID186) + * 3-D 10-Node Tetrahedral Structural Solid (SOLID187) + * 4-Node Finite Strain Shell (SHELL181) + * 3-D 8-Node Layered Solid Shell (SOLSH190) + * 8-Node Finite Strain Shell (SHELL281) + + """ + + example = modal_analysis_of_a_cyclic_symmetric_annular_plate + example_name = "Modal Analysis of a Cyclic Symmetric Annular Plate" + + def test_cyclic(self, mapdl, reader, post, result): + assert result.is_cyclic + assert result.n_sector == 12 + assert result.num_stages == 1 + + str_result = str(result) + assert re.search(r"Cyclic\s*:\s*True", str_result) + assert re.search(r"Title\s*:\s*VM244", str_result) + assert re.search(r"Result Sets\s*:\s*4", str_result) + + def test_material_properties(self, mapdl, reader, post, result): + assert reader.materials == result.materials + + @pytest.mark.parametrize("id_", [1, 2, 3, 4, 500, 464]) + def test_element_lookup(self, mapdl, reader, result, id_): + assert reader.element_lookup(id_) == result.element_lookup(id_)