From 3815442e9d3756bbb122ba727cd2c52b6534cf63 Mon Sep 17 00:00:00 2001 From: mayankansys Date: Wed, 4 Mar 2026 12:35:01 +0530 Subject: [PATCH 1/3] fix: replace top-level package proxy imports with direct module imports --- src/ansys/fluent/core/codegen/allapigen.py | 2 +- .../core/codegen/builtin_settingsgen.py | 9 ++--- src/ansys/fluent/core/data_model_cache.py | 2 +- .../fluent/core/docker/docker_compose.py | 2 +- src/ansys/fluent/core/fluent_connection.py | 17 +++++----- .../fluent/core/launcher/fluent_container.py | 33 +++++++++---------- .../fluent/core/launcher/launch_options.py | 2 +- src/ansys/fluent/core/launcher/launcher.py | 8 ++--- .../fluent/core/launcher/launcher_utils.py | 2 +- src/ansys/fluent/core/launcher/server_info.py | 4 +-- .../fluent/core/launcher/slurm_launcher.py | 4 +-- src/ansys/fluent/core/logger.py | 2 +- src/ansys/fluent/core/search.py | 10 ++---- src/ansys/fluent/core/services/api_upgrade.py | 2 +- .../fluent/core/services/datamodel_se.py | 11 ++----- .../fluent/core/services/health_check.py | 4 +-- .../fluent/core/services/interceptors.py | 4 +-- src/ansys/fluent/core/session_pure_meshing.py | 4 +-- src/ansys/fluent/core/session_shared.py | 10 +++--- src/ansys/fluent/core/session_solver.py | 5 +-- src/ansys/fluent/core/solver/flobject.py | 7 ++-- .../datamodel_event_streaming.py | 2 +- .../streaming_services/datamodel_streaming.py | 6 ++-- src/ansys/fluent/core/system_coupling.py | 6 ++-- src/ansys/fluent/core/utils/data_transfer.py | 8 ++--- src/ansys/fluent/core/utils/fluent_version.py | 5 +-- src/ansys/fluent/core/utils/networking.py | 2 +- 27 files changed, 79 insertions(+), 94 deletions(-) diff --git a/src/ansys/fluent/core/codegen/allapigen.py b/src/ansys/fluent/core/codegen/allapigen.py index fb968843319d..77aa39fcff71 100644 --- a/src/ansys/fluent/core/codegen/allapigen.py +++ b/src/ansys/fluent/core/codegen/allapigen.py @@ -26,13 +26,13 @@ from pathlib import Path import pickle -from ansys.fluent.core import config from 
ansys.fluent.core.codegen import ( # noqa: F401 builtin_settingsgen, datamodelgen, settingsgen, tuigen, ) +from ansys.fluent.core.module_config import config from ansys.fluent.core.search import get_api_tree_file_name diff --git a/src/ansys/fluent/core/codegen/builtin_settingsgen.py b/src/ansys/fluent/core/codegen/builtin_settingsgen.py index f09525abfe31..7523eb8b5a91 100644 --- a/src/ansys/fluent/core/codegen/builtin_settingsgen.py +++ b/src/ansys/fluent/core/codegen/builtin_settingsgen.py @@ -24,23 +24,24 @@ import re -from ansys.fluent.core import FluentVersion, config +from ansys.fluent.core.module_config import config from ansys.fluent.core.solver.flobject import ( CreatableNamedObjectMixin, NamedObject, _ChildNamedObjectAccessorMixin, ) from ansys.fluent.core.solver.settings_builtin_data import DATA -from ansys.fluent.core.utils.fluent_version import all_versions +from ansys.fluent.core.utils.fluent_version import FluentVersion, all_versions _PY_FILE = config.codegen_outdir / "solver" / "settings_builtin.py" _PYI_FILE = config.codegen_outdir / "solver" / "settings_builtin.pyi" def _get_settings_root(version: str): - from ansys.fluent.core import config, utils + from ansys.fluent.core.module_config import config + from ansys.fluent.core.utils import load_module as _load_module - settings = utils.load_module( + settings = _load_module( f"settings_{version}", config.codegen_outdir / "solver" / f"settings_{version}.py", ) diff --git a/src/ansys/fluent/core/data_model_cache.py b/src/ansys/fluent/core/data_model_cache.py index e59275f5fd54..f81bc65e76cd 100644 --- a/src/ansys/fluent/core/data_model_cache.py +++ b/src/ansys/fluent/core/data_model_cache.py @@ -126,7 +126,7 @@ def update(self, d: dict[str, Any], d1: dict[str, Any]): def _is_dict_parameter_type(version: FluentVersion, rules: str, rules_path: str): """Check if a parameter is a dict type.""" - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config from 
ansys.fluent.core.services.datamodel_se import ( PyDictionary, PyNamedObjectContainer, diff --git a/src/ansys/fluent/core/docker/docker_compose.py b/src/ansys/fluent/core/docker/docker_compose.py index 62b65d9f1921..bd411955159e 100644 --- a/src/ansys/fluent/core/docker/docker_compose.py +++ b/src/ansys/fluent/core/docker/docker_compose.py @@ -33,7 +33,7 @@ class ComposeBasedLauncher: """Launch Fluent through docker or Podman compose.""" def __init__(self, compose_config, container_dict, container_server_info_file): - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config self._compose_config = compose_config self._compose_name = f"pyfluent_compose_{uuid.uuid4().hex}" diff --git a/src/ansys/fluent/core/fluent_connection.py b/src/ansys/fluent/core/fluent_connection.py index d4316c7241c4..926f86d80222 100644 --- a/src/ansys/fluent/core/fluent_connection.py +++ b/src/ansys/fluent/core/fluent_connection.py @@ -43,7 +43,6 @@ from deprecated.sphinx import deprecated import grpc -import ansys.fluent.core as pyfluent from ansys.fluent.core.launcher.error_warning_messages import ( ALLOW_REMOTE_HOST_NOT_PROVIDED_IN_REMOTE, CERTIFICATES_FOLDER_NOT_PROVIDED_AT_CONNECT, @@ -51,6 +50,7 @@ INSECURE_MODE_WARNING, ) from ansys.fluent.core.launcher.launcher_utils import ComposeConfig +from ansys.fluent.core.module_config import config from ansys.fluent.core.pyfluent_warnings import InsecureGrpcWarning from ansys.fluent.core.services import service_creator from ansys.fluent.core.services.app_utilities import ( @@ -61,6 +61,7 @@ from ansys.fluent.core.services.scheme_eval import SchemeEvalService from ansys.fluent.core.utils.execution import timeout_exec, timeout_loop from ansys.fluent.core.utils.file_transfer_service import ContainerFileTransferStrategy +from ansys.fluent.core.utils.fluent_version import FluentVersion from ansys.fluent.core.utils.networking import get_uds_path, is_localhost from ansys.platform.instancemanagement import Instance from 
ansys.tools.common.cyberchannel import create_channel @@ -263,9 +264,9 @@ def list_values(self) -> dict: def _get_ip_and_port(ip: str | None = None, port: int | None = None) -> (str, int): if not ip: - ip = pyfluent.config.launch_fluent_ip or "127.0.0.1" + ip = config.launch_fluent_ip or "127.0.0.1" if not port: - port = pyfluent.config.launch_fluent_port + port = config.launch_fluent_port if not port: raise PortNotProvided() return ip, port @@ -344,11 +345,11 @@ def __init__(self, create_grpc_service, error_state): self._app_utilities_service = create_grpc_service( AppUtilitiesService, error_state ) - match pyfluent.FluentVersion(self.scheme_eval.version): - case v if v < pyfluent.FluentVersion.v252: + match FluentVersion(self.scheme_eval.version): + case v if v < FluentVersion.v252: self._app_utilities = AppUtilitiesOld(self.scheme_eval) - case pyfluent.FluentVersion.v252: + case FluentVersion.v252: self._app_utilities = AppUtilitiesV252( self._app_utilities_service, self.scheme_eval ) @@ -543,7 +544,7 @@ def __init__( # At this point, the server must be running. If the following check_health() # throws, we should not proceed. # TODO: Show user-friendly error message. 
- if pyfluent.config.check_health: + if config.check_health: try: self._health_check.check_health() except RuntimeError: @@ -858,7 +859,7 @@ def exit( ) if timeout is None: - config_timeout = pyfluent.config.force_exit_timeout + config_timeout = config.force_exit_timeout if config_timeout is not None: logger.debug(f"Found force_exit_timeout config: '{config_timeout}'") try: diff --git a/src/ansys/fluent/core/launcher/fluent_container.py b/src/ansys/fluent/core/launcher/fluent_container.py index 4be35d9d3bea..ac9d22fd242d 100644 --- a/src/ansys/fluent/core/launcher/fluent_container.py +++ b/src/ansys/fluent/core/launcher/fluent_container.py @@ -79,13 +79,13 @@ from typing import Any, List import warnings -import ansys.fluent.core as pyfluent from ansys.fluent.core.docker.docker_compose import ComposeBasedLauncher from ansys.fluent.core.docker.utils import get_ghcr_fluent_image_name from ansys.fluent.core.launcher.error_handler import ( LaunchFluentError, ) from ansys.fluent.core.launcher.launcher_utils import ComposeConfig +from ansys.fluent.core.module_config import config from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning from ansys.fluent.core.session import _parse_server_info_file from ansys.fluent.core.utils.deprecate import deprecate_arguments @@ -131,7 +131,7 @@ def dict_to_str(dict: dict) -> str: This is useful for logging purposes, to avoid printing sensitive information such as license server details. 
""" - if "environment" in dict and pyfluent.config.hide_log_secrets: + if "environment" in dict and config.hide_log_secrets: modified_dict = dict.copy() modified_dict.pop("environment") return pformat(modified_dict) @@ -257,7 +257,7 @@ def configure_container_dict( if file_transfer_service: mount_source = file_transfer_service.mount_source else: - mount_source = pyfluent.config.container_mount_source + mount_source = config.container_mount_source if "volumes" in container_dict: if len(container_dict["volumes"]) != 1: @@ -290,7 +290,7 @@ def configure_container_dict( if "working_dir" in container_dict: mount_target = container_dict["working_dir"] else: - mount_target = pyfluent.config.container_mount_target + mount_target = config.container_mount_target if "working_dir" in container_dict and mount_target: # working_dir will be set later to the final value of mount_target @@ -301,7 +301,7 @@ def configure_container_dict( if not mount_target: logger.debug("No container 'mount_target' specified, using default value.") - mount_target = pyfluent.config.container_mount_target + mount_target = config.container_mount_target if "volumes" not in container_dict: container_dict.update(volumes=[f"{mount_source}:{mount_target}"]) @@ -326,8 +326,8 @@ def configure_container_dict( if not port_mapping and "ports" in container_dict: # take the specified 'port', OR the first port value from the specified 'ports', for Fluent to use port_mapping = container_dict["ports"] - if not port_mapping and pyfluent.config.launch_fluent_port: - port = pyfluent.config.launch_fluent_port + if not port_mapping and config.launch_fluent_port: + port = config.launch_fluent_port port_mapping = {port: port} if not port_mapping: port = get_free_port() @@ -355,7 +355,7 @@ def configure_container_dict( ) if "labels" not in container_dict: - test_name = pyfluent.config.test_name + test_name = config.test_name container_dict.update( labels={"test_name": test_name}, ) @@ -400,14 +400,13 @@ def 
configure_container_dict( if not fluent_image: if not image_tag: - image_tag = pyfluent.config.fluent_image_tag + image_tag = config.fluent_image_tag if not image_name and image_tag: - image_name = ( - pyfluent.config.fluent_image_name - or get_ghcr_fluent_image_name(image_tag) + image_name = config.fluent_image_name or get_ghcr_fluent_image_name( + image_tag ) if not image_tag or not image_name: - fluent_image = pyfluent.config.fluent_container_name + fluent_image = config.fluent_container_name elif image_tag and image_name: if image_tag.startswith("sha"): fluent_image = f"{image_name}@{image_tag}" @@ -418,19 +417,19 @@ def configure_container_dict( container_dict["fluent_image"] = fluent_image - if not pyfluent.config.fluent_automatic_transcript: + if not config.fluent_automatic_transcript: if "environment" not in container_dict: container_dict["environment"] = {} container_dict["environment"]["FLUENT_NO_AUTOMATIC_TRANSCRIPT"] = "1" - if pyfluent.config.launch_fluent_ip or pyfluent.config.remoting_server_address: + if config.launch_fluent_ip or config.remoting_server_address: if "environment" not in container_dict: container_dict["environment"] = {} container_dict["environment"]["REMOTING_SERVER_ADDRESS"] = ( - pyfluent.config.launch_fluent_ip or pyfluent.config.remoting_server_address + config.launch_fluent_ip or config.remoting_server_address ) - if pyfluent.config.launch_fluent_skip_password_check: + if config.launch_fluent_skip_password_check: if "environment" not in container_dict: container_dict["environment"] = {} container_dict["environment"]["FLUENT_LAUNCHED_FROM_PYFLUENT"] = "1" diff --git a/src/ansys/fluent/core/launcher/launch_options.py b/src/ansys/fluent/core/launcher/launch_options.py index f963168e9f89..c178f4c24432 100644 --- a/src/ansys/fluent/core/launcher/launch_options.py +++ b/src/ansys/fluent/core/launcher/launch_options.py @@ -270,7 +270,7 @@ def _get_fluent_launch_mode(start_container, container_dict, scheduler_options): 
fluent_launch_mode: LaunchMode Fluent launch mode. """ - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config if pypim.is_configured(): fluent_launch_mode = LaunchMode.PIM diff --git a/src/ansys/fluent/core/launcher/launcher.py b/src/ansys/fluent/core/launcher/launcher.py index b82544eb24f1..0f8c149a404c 100644 --- a/src/ansys/fluent/core/launcher/launcher.py +++ b/src/ansys/fluent/core/launcher/launcher.py @@ -32,7 +32,6 @@ from typing import Any, Dict from warnings import warn -import ansys.fluent.core as pyfluent from ansys.fluent.core._types import PathType from ansys.fluent.core.exceptions import DisallowedValuesError from ansys.fluent.core.fluent_connection import FluentConnection @@ -65,6 +64,7 @@ from ansys.fluent.core.launcher.slurm_launcher import SlurmFuture, SlurmLauncher from ansys.fluent.core.launcher.standalone_launcher import StandaloneLauncher import ansys.fluent.core.launcher.watchdog as watchdog +from ansys.fluent.core.module_config import config from ansys.fluent.core.session_meshing import Meshing from ansys.fluent.core.session_pure_meshing import PureMeshing from ansys.fluent.core.session_solver import Solver @@ -124,7 +124,7 @@ def _show_gui_to_ui_mode(old_arg_val, **kwds): return UIMode.NO_GUI elif container_dict: return UIMode.NO_GUI - elif pyfluent.config.launch_fluent_container: + elif config.launch_fluent_container: return UIMode.NO_GUI else: return UIMode.GUI @@ -366,7 +366,7 @@ def launch_fluent( ) if start_timeout is None: - start_timeout = pyfluent.config.launch_fluent_timeout + start_timeout = config.launch_fluent_timeout def _mode_to_launcher_type(fluent_launch_mode: LaunchMode): launcher_mode_type = { @@ -404,7 +404,7 @@ def _mode_to_launcher_type(fluent_launch_mode: LaunchMode): ) common_args = launch_fluent_args.intersection(launcher_type_args) launcher_argvals = {arg: val for arg, val in argvals.items() if arg in common_args} - if pyfluent.config.start_watchdog is False: + if 
config.start_watchdog is False: launcher_argvals["start_watchdog"] = False launcher = launcher_type(**launcher_argvals) return launcher() diff --git a/src/ansys/fluent/core/launcher/launcher_utils.py b/src/ansys/fluent/core/launcher/launcher_utils.py index 76df08511780..61fa10221bc6 100644 --- a/src/ansys/fluent/core/launcher/launcher_utils.py +++ b/src/ansys/fluent/core/launcher/launcher_utils.py @@ -47,7 +47,7 @@ def __init__( use_docker_compose: bool | None = None, use_podman_compose: bool | None = None, ): - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config self._env_docker = config.use_docker_compose self._env_podman = config.use_podman_compose diff --git a/src/ansys/fluent/core/launcher/server_info.py b/src/ansys/fluent/core/launcher/server_info.py index 471ee981bec6..fae8f801eb62 100644 --- a/src/ansys/fluent/core/launcher/server_info.py +++ b/src/ansys/fluent/core/launcher/server_info.py @@ -53,7 +53,7 @@ def _get_server_info_file_names(use_tmpdir=True) -> tuple[str, str]: temporary directory if ``use_tmpdir`` is True, otherwise it is created in the current working directory. """ - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config server_info_dir = config.fluent_server_info_dir dir_ = ( @@ -98,7 +98,7 @@ def _get_server_info( ): """Get server connection information of an already running session. 
Returns (ip, port, password) or (unix_socket, password)""" - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config if not (ip and port) and not server_info_file_name: raise IpPortNotProvided() diff --git a/src/ansys/fluent/core/launcher/slurm_launcher.py b/src/ansys/fluent/core/launcher/slurm_launcher.py index 4f25aa0e2415..85ed7a636d6b 100644 --- a/src/ansys/fluent/core/launcher/slurm_launcher.py +++ b/src/ansys/fluent/core/launcher/slurm_launcher.py @@ -70,7 +70,6 @@ import time from typing import Any, Callable, Dict -from ansys.fluent.core import config from ansys.fluent.core._types import PathType from ansys.fluent.core.exceptions import InvalidArgument from ansys.fluent.core.launcher.error_warning_messages import ( @@ -93,6 +92,7 @@ ) from ansys.fluent.core.launcher.process_launch_string import _generate_launch_string from ansys.fluent.core.launcher.server_info import _get_server_info_file_names +from ansys.fluent.core.module_config import config from ansys.fluent.core.session_meshing import Meshing from ansys.fluent.core.session_pure_meshing import PureMeshing from ansys.fluent.core.session_solver import Solver @@ -521,8 +521,6 @@ def __init__( The allocated machines and core counts are queried from the scheduler environment and passed to Fluent. 
""" - from ansys.fluent.core import config - certificates_folder, insecure_mode = get_remote_grpc_options( certificates_folder, insecure_mode ) diff --git a/src/ansys/fluent/core/logger.py b/src/ansys/fluent/core/logger.py index e89432241de4..ca073d903510 100644 --- a/src/ansys/fluent/core/logger.py +++ b/src/ansys/fluent/core/logger.py @@ -25,7 +25,7 @@ import logging.config import os -from ansys.fluent.core import config +from ansys.fluent.core.module_config import config _logging_file_enabled = False diff --git a/src/ansys/fluent/core/search.py b/src/ansys/fluent/core/search.py index 8c66d1dba507..a804dfda9c70 100644 --- a/src/ansys/fluent/core/search.py +++ b/src/ansys/fluent/core/search.py @@ -33,7 +33,7 @@ import re import warnings -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.solver.error_message import closest_allowed_names from ansys.fluent.core.utils.fluent_version import ( FluentVersion, @@ -47,15 +47,11 @@ def _get_api_tree_data_file_path(): """Get API tree data file.""" - from ansys.fluent.core import config - return (config.codegen_outdir / "api_tree" / "api_objects.json").resolve() def get_api_tree_file_name(version: str) -> Path: """Get API tree file name.""" - from ansys.fluent.core import config - return (config.codegen_outdir / f"api_tree_{version}.pickle").resolve() @@ -146,8 +142,6 @@ def _write_api_tree_file(api_tree_data: dict, api_object_names: list): from nltk.corpus import wordnet as wn _download_nltk_data() - from ansys.fluent.core import config - json_file_folder = Path(os.path.join(config.codegen_outdir, "api_tree")) json_file_folder.mkdir(parents=True, exist_ok=True) @@ -264,7 +258,7 @@ def extract_results(api_data): ) results = final_results or all_results - if pyfluent.config.print_search_results: + if config.print_search_results: for result in results: print(result) elif results: diff --git a/src/ansys/fluent/core/services/api_upgrade.py 
b/src/ansys/fluent/core/services/api_upgrade.py index bcc6facd50f2..3df6a17631a2 100644 --- a/src/ansys/fluent/core/services/api_upgrade.py +++ b/src/ansys/fluent/core/services/api_upgrade.py @@ -45,7 +45,7 @@ def __init__( self._id = None def _can_advise(self) -> bool: - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config return not config.skip_api_upgrade_advice and self._mode == "solver" diff --git a/src/ansys/fluent/core/services/datamodel_se.py b/src/ansys/fluent/core/services/datamodel_se.py index c680de4618fd..f431df1e7d89 100644 --- a/src/ansys/fluent/core/services/datamodel_se.py +++ b/src/ansys/fluent/core/services/datamodel_se.py @@ -35,8 +35,8 @@ from ansys.api.fluent.v0 import datamodel_se_pb2 as DataModelProtoModule from ansys.api.fluent.v0 import datamodel_se_pb2_grpc as DataModelGrpcModule from ansys.api.fluent.v0.variant_pb2 import Variant -import ansys.fluent.core as pyfluent from ansys.fluent.core.data_model_cache import DataModelCache, NameKey +from ansys.fluent.core.module_config import config from ansys.fluent.core.services.interceptors import ( BatchInterceptor, ErrorStateInterceptor, @@ -513,9 +513,7 @@ def __init__( self.event_streaming = None self.subscriptions = SubscriptionList() self.file_transfer_service = file_transfer_service - self.cache = ( - DataModelCache() if pyfluent.config.datamodel_use_state_cache else None - ) + self.cache = DataModelCache() if config.datamodel_use_state_cache else None self.version = version def get_attribute_value(self, rules: str, path: str, attribute: str) -> ValueT: @@ -1098,10 +1096,7 @@ def get_attr(self, attrib: str) -> Any: Any Value of the attribute. 
""" - if ( - pyfluent.config.datamodel_use_attr_cache - and self.rules != "meshing_workflow" - ): + if config.datamodel_use_attr_cache and self.rules != "meshing_workflow": return self._get_cached_attr(attrib) return self._get_remote_attr(attrib) diff --git a/src/ansys/fluent/core/services/health_check.py b/src/ansys/fluent/core/services/health_check.py index 3010c44f50cb..35fc38d9dc6a 100644 --- a/src/ansys/fluent/core/services/health_check.py +++ b/src/ansys/fluent/core/services/health_check.py @@ -30,7 +30,7 @@ from grpc_health.v1 import health_pb2 as HealthCheckModule from grpc_health.v1 import health_pb2_grpc as HealthCheckGrpcModule -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.services.interceptors import ( BatchInterceptor, ErrorStateInterceptor, @@ -82,7 +82,7 @@ def check_health(self) -> Status: response = self._stub.Check( request, metadata=self._metadata, - timeout=pyfluent.config.check_health_timeout, + timeout=config.check_health_timeout, ) return HealthCheckService.Status(response.status) diff --git a/src/ansys/fluent/core/services/interceptors.py b/src/ansys/fluent/core/services/interceptors.py index 7e64ed5b68f5..fcdddc6a98b9 100644 --- a/src/ansys/fluent/core/services/interceptors.py +++ b/src/ansys/fluent/core/services/interceptors.py @@ -40,7 +40,7 @@ def _upper_snake_case_to_camel_case(name: str) -> str: def _truncate_grpc_str(message: Message) -> str: - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config truncate_len = config.grpc_log_bytes_limit // 5 message_bytes = message.ByteSize() @@ -68,7 +68,7 @@ def _intercept_call( client_call_details: grpc.ClientCallDetails, request: Any, ) -> Any: - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config network_logger.debug( f"GRPC_TRACE: RPC = {client_call_details.method}, request = {_truncate_grpc_str(request)}" diff --git 
a/src/ansys/fluent/core/session_pure_meshing.py b/src/ansys/fluent/core/session_pure_meshing.py index 9616e66cb7dc..28141cd36254 100644 --- a/src/ansys/fluent/core/session_pure_meshing.py +++ b/src/ansys/fluent/core/session_pure_meshing.py @@ -26,11 +26,11 @@ import os from typing import Any, Dict -import ansys.fluent.core as pyfluent from ansys.fluent.core._types import PathType from ansys.fluent.core.data_model_cache import DataModelCache, NameKey from ansys.fluent.core.exceptions import BetaFeaturesNotEnabled from ansys.fluent.core.fluent_connection import FluentConnection +from ansys.fluent.core.module_config import config from ansys.fluent.core.services import SchemeEval from ansys.fluent.core.session import BaseSession from ansys.fluent.core.session_base_meshing import BaseMeshing @@ -125,7 +125,7 @@ def __init__( self.datamodel_streams[rules] = stream stream.start( rules=rules, - no_commands_diff_state=pyfluent.config.datamodel_use_nocommands_diff_state, + no_commands_diff_state=config.datamodel_use_nocommands_diff_state, ) self._fluent_connection.register_finalizer_cb(stream.stop) diff --git a/src/ansys/fluent/core/session_shared.py b/src/ansys/fluent/core/session_shared.py index cdd7ca3b881e..16d518574016 100644 --- a/src/ansys/fluent/core/session_shared.py +++ b/src/ansys/fluent/core/session_shared.py @@ -24,10 +24,11 @@ import logging -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.pyfluent_warnings import warning_for_fluent_dev_version from ansys.fluent.core.services.datamodel_se import PyMenuGeneric from ansys.fluent.core.services.datamodel_tui import TUIMenu +from ansys.fluent.core.utils import load_module _CODEGEN_MSG_DATAMODEL = ( "Currently calling the datamodel API in a generic manner. 
" @@ -47,9 +48,7 @@ def _make_tui_module(session, module_name): try: - from ansys.fluent.core import config - - tui_module = pyfluent.utils.load_module( + tui_module = load_module( f"{module_name}_tui_{session._version}", config.codegen_outdir / module_name / f"tui_{session._version}.py", ) @@ -65,11 +64,10 @@ def _make_tui_module(session, module_name): def _make_datamodel_module(session, module_name): try: - from ansys.fluent.core import config from ansys.fluent.core.codegen.datamodelgen import datamodel_file_name_map file_name = datamodel_file_name_map[module_name] - module = pyfluent.utils.load_module( + module = load_module( f"{module_name}_{session._version}", config.codegen_outdir / f"datamodel_{session._version}" / f"{file_name}.py", ) diff --git a/src/ansys/fluent/core/session_solver.py b/src/ansys/fluent/core/session_solver.py index 89178e0cb185..ad7d904a9489 100644 --- a/src/ansys/fluent/core/session_solver.py +++ b/src/ansys/fluent/core/session_solver.py @@ -31,6 +31,7 @@ from ansys.api.fluent.v0 import svar_pb2 as SvarProtoModule import ansys.fluent.core as pyfluent from ansys.fluent.core.exceptions import BetaFeaturesNotEnabled +from ansys.fluent.core.module_config import config from ansys.fluent.core.pyfluent_warnings import PyFluentDeprecationWarning from ansys.fluent.core.services import SchemeEval, service_creator from ansys.fluent.core.services.field_data import ZoneInfo, ZoneType @@ -158,7 +159,7 @@ def _build_from_fluent_connection( ) #: Manage Fluent's solution monitors. 
self.monitors = MonitorsManager(fluent_connection._id, monitors_service) - if not pyfluent.config.disable_monitor_refresh_on_init: + if not config.disable_monitor_refresh_on_init: self.events.register_callback( (SolverEvent.SOLUTION_INITIALIZED, SolverEvent.DATA_LOADED), self.monitors.refresh, @@ -270,7 +271,7 @@ def _interrupt(cls, command): "solution/run-calculation/calculate", "solution/run-calculation/dual-time-iterate", ] - if pyfluent.config.support_solver_interrupt: + if config.support_solver_interrupt: if command.path in interruptible_commands: command._root.solution.run_calculation.interrupt() diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py index 3c30ac9a8a0c..5fb82cf9c500 100644 --- a/src/ansys/fluent/core/solver/flobject.py +++ b/src/ansys/fluent/core/solver/flobject.py @@ -1784,7 +1784,7 @@ class BaseCommand(Action): def _execute_command(self, *args, **kwds): """Execute a command with the specified positional and keyword arguments.""" - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config if self.flproxy.is_interactive_mode(): prompt = self.flproxy.get_command_confirmation_prompt( @@ -2394,14 +2394,15 @@ def get_root( RuntimeError If hash values are inconsistent. 
""" - from ansys.fluent.core import config, utils + from ansys.fluent.core.module_config import config + from ansys.fluent.core.utils import load_module as _load_module if config.use_runtime_python_classes: obj_info = flproxy.get_static_info() root_cls, _ = get_cls("", obj_info, version=version) else: try: - settings = utils.load_module( + settings = _load_module( f"settings_{version}", config.codegen_outdir / "solver" / f"settings_{version}.py", ) diff --git a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py index 2cefc28c55fd..4c32f3441665 100644 --- a/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py +++ b/src/ansys/fluent/core/streaming_services/datamodel_event_streaming.py @@ -61,7 +61,7 @@ def unregister_callback(self, tag: str): def _process_streaming(self, id, stream_begin_method, started_evt, *args, **kwargs): """Processes datamodel events.""" - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config request = DataModelProtoModule.EventRequest(*args, **kwargs) responses = self._streaming_service.begin_streaming( diff --git a/src/ansys/fluent/core/streaming_services/datamodel_streaming.py b/src/ansys/fluent/core/streaming_services/datamodel_streaming.py index fb8028c0e293..65ad102419d1 100644 --- a/src/ansys/fluent/core/streaming_services/datamodel_streaming.py +++ b/src/ansys/fluent/core/streaming_services/datamodel_streaming.py @@ -27,7 +27,7 @@ from google.protobuf.json_format import MessageToDict from ansys.api.fluent.v0 import datamodel_se_pb2 -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.streaming_services.streaming import StreamingService network_logger: logging.Logger = logging.getLogger("pyfluent.networking") @@ -57,9 +57,7 @@ def _process_streaming( """Processes datamodel events.""" data_model_request = 
datamodel_se_pb2.DataModelRequest(*args, **kwargs) data_model_request.rules = rules - data_model_request.returnstatechanges = ( - pyfluent.config.datamodel_return_state_changes - ) + data_model_request.returnstatechanges = config.datamodel_return_state_changes if no_commands_diff_state: data_model_request.diffstate = datamodel_se_pb2.DIFFSTATE_NOCOMMANDS responses = self._streaming_service.begin_streaming( diff --git a/src/ansys/fluent/core/system_coupling.py b/src/ansys/fluent/core/system_coupling.py index 556f5253532d..87a10f02f34d 100644 --- a/src/ansys/fluent/core/system_coupling.py +++ b/src/ansys/fluent/core/system_coupling.py @@ -29,7 +29,7 @@ from defusedxml.ElementTree import fromstring -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.utils.fluent_version import FluentVersion @@ -252,9 +252,7 @@ def get_scp_string() -> str: # the local Fluent container working directory will correspond to # pyfluent.EXAMPLES_PATH in the host, so that is where the SCP file # will be written. 
- examples_path_scp = os.path.join( - pyfluent.config.examples_path, scp_file_name - ) + examples_path_scp = os.path.join(config.examples_path, scp_file_name) if os.path.exists(examples_path_scp): scp_file_name = examples_path_scp diff --git a/src/ansys/fluent/core/utils/data_transfer.py b/src/ansys/fluent/core/utils/data_transfer.py index 9d29988896d2..b6c372bae35a 100644 --- a/src/ansys/fluent/core/utils/data_transfer.py +++ b/src/ansys/fluent/core/utils/data_transfer.py @@ -27,7 +27,7 @@ import os from pathlib import Path, PurePosixPath -import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config from ansys.fluent.core.utils.execution import asynchronous network_logger = logging.getLogger("pyfluent.networking") @@ -116,7 +116,7 @@ def transfer_case( """ inside_container = source_instance.connection_properties.inside_container if not workdir: - workdir = Path(pyfluent.config.examples_path) + workdir = Path(config.examples_path) else: workdir = Path(workdir) if inside_container: @@ -124,9 +124,9 @@ def transfer_case( network_logger.warning( "Fluent is running inside a container, and no 'container_workdir' was specified for " "'transfer_case'. Assuming that the default container mount path " - f"'{pyfluent.config.container_mount_target}' is being used. " + f"'{config.container_mount_target}' is being used. 
" ) - container_workdir = PurePosixPath(pyfluent.config.container_mount_target) + container_workdir = PurePosixPath(config.container_mount_target) network_logger.debug(f"container_workdir: {container_workdir}") else: container_workdir = PurePosixPath(container_workdir) diff --git a/src/ansys/fluent/core/utils/fluent_version.py b/src/ansys/fluent/core/utils/fluent_version.py index d7cbb56ff74d..9d0ff8692230 100644 --- a/src/ansys/fluent/core/utils/fluent_version.py +++ b/src/ansys/fluent/core/utils/fluent_version.py @@ -31,6 +31,7 @@ from typing import Any import ansys.fluent.core as pyfluent +from ansys.fluent.core.module_config import config class AnsysVersionNotFound(RuntimeError): @@ -159,7 +160,7 @@ def current_release(cls): FluentVersion FluentVersion member corresponding to the latest release. """ - return cls(pyfluent.config.fluent_release_version) + return cls(config.fluent_release_version) @classmethod def current_dev(cls): @@ -170,7 +171,7 @@ def current_dev(cls): FluentVersion FluentVersion member corresponding to the latest development version. 
""" - return cls(pyfluent.config.fluent_dev_version) + return cls(config.fluent_dev_version) @classmethod def minimum_supported(cls): diff --git a/src/ansys/fluent/core/utils/networking.py b/src/ansys/fluent/core/utils/networking.py index 675ea434b42d..d459b9921c55 100644 --- a/src/ansys/fluent/core/utils/networking.py +++ b/src/ansys/fluent/core/utils/networking.py @@ -82,7 +82,7 @@ def find_remoting_ip() -> str: str remoting ip address """ - from ansys.fluent.core import config + from ansys.fluent.core.module_config import config all_ips = [ addrinfo[-1][0] From 37fa7352b3b7de9d9eadbbbe81706b0e4c9745e6 Mon Sep 17 00:00:00 2001 From: mayankansys Date: Thu, 26 Mar 2026 14:49:24 +0530 Subject: [PATCH 2/3] added the doc string --- src/ansys/fluent/core/rest/README.md | 291 +++++++++ src/ansys/fluent/core/rest/__init__.py | 44 ++ src/ansys/fluent/core/rest/client.py | 388 ++++++++++++ src/ansys/fluent/core/rest/mock_server.py | 585 ++++++++++++++++++ src/ansys/fluent/core/rest/tests/__init__.py | 22 + src/ansys/fluent/core/rest/tests/conftest.py | 39 ++ .../core/rest/tests/test_rest_client.py | 324 ++++++++++ 7 files changed, 1693 insertions(+) create mode 100644 src/ansys/fluent/core/rest/README.md create mode 100644 src/ansys/fluent/core/rest/__init__.py create mode 100644 src/ansys/fluent/core/rest/client.py create mode 100644 src/ansys/fluent/core/rest/mock_server.py create mode 100644 src/ansys/fluent/core/rest/tests/__init__.py create mode 100644 src/ansys/fluent/core/rest/tests/conftest.py create mode 100644 src/ansys/fluent/core/rest/tests/test_rest_client.py diff --git a/src/ansys/fluent/core/rest/README.md b/src/ansys/fluent/core/rest/README.md new file mode 100644 index 000000000000..5059a0ab80c8 --- /dev/null +++ b/src/ansys/fluent/core/rest/README.md @@ -0,0 +1,291 @@ +# PyFluent REST Settings Transport — Step 1 Exploration + +## What Is This? + +Fluent is a simulation solver. 
PyFluent is the Python library that lets you +control Fluent from code — change settings, run simulations, read results. + +Normally PyFluent talks to Fluent over **gRPC**, which is Google's high-speed +binary communication protocol. It works great, but it ties PyFluent tightly to +gRPC. + +The goal of this work (**Issue #4959**) is to prove that PyFluent can work just +as well over a plain **REST API** (the same kind of API that every web service +uses). If we can do that, PyFluent becomes more flexible — it can talk to +Fluent however it needs to, without any single transport being baked in. + +This folder contains **Step 1**: a standalone Python REST client and a matching +mock server, so we can develop and test the idea without a real Fluent instance. + +--- + +## The Big Picture (Plain English) + +Think of it like ordering food: + +| Concept | Restaurant Analogy | +|---|---| +| **Fluent solver** | The kitchen — it does the actual cooking (simulation) | +| **PyFluent settings** | The menu — a structured list of things you can configure | +| **gRPC transport** | A private phone line between the waiter and the kitchen | +| **REST transport** | A standard walkie-talkie anyone can use | +| **`FluentRestClient`** | The waiter who speaks walkie-talkie | +| **`FluentRestMockServer`** | A fake kitchen used for training waiters | + +Right now PyFluent only has the private phone line (gRPC). This project adds +the walkie-talkie (REST) as an equally valid option. + +--- + +## Folder Structure + +``` +src/ansys/fluent/core/rest/ +│ +├── __init__.py ← Entry point. Import FluentRestClient and +│ FluentRestMockServer from here. +│ +├── client.py ← The REST client. +│ Speaks HTTP to a Fluent REST server. +│ Uses only Python's built-in urllib — no extra packages. +│ +├── mock_server.py ← A fake Fluent server for testing. +│ Runs in memory. Uses only Python's built-in +│ http.server — no Flask, no extra packages. +│ +├── README.md ← This file. 
+│ +└── tests/ + ├── conftest.py ← Shared test fixtures (start/stop the mock server). + └── test_rest_client.py ← 40 tests covering every feature. +``` + +--- + +## How It Works + +### 1. The Settings Tree + +Fluent has hundreds of settings organised like a folder tree: + +``` +setup/ + models/ + energy/ + enabled ← True or False + viscous/ + model ← "k-epsilon", "laminar", etc. + boundary_conditions/ + velocity_inlet/ + inlet/ + momentum/ + velocity_magnitude/ + value ← 1.0 (m/s) +solution/ + run_calculation/ + iter_count ← 100 +``` + +Every setting is identified by its **path** — a slash-separated string like +`"setup/models/energy/enabled"`. + +### 2. The REST API Contract + +`FluentRestClient` talks to a server using simple HTTP requests. Each +operation maps to one HTTP call: + +| What you want to do | HTTP call | +|---|---| +| Read a setting | `GET /settings/var?path=setup/models/energy/enabled` | +| Write a setting | `PUT /settings/var?path=setup/models/energy/enabled` + body `{"value": false}` | +| Get the full settings tree structure | `GET /settings/static-info` | +| List child objects (e.g. boundary names) | `GET /settings/object-names?path=setup/boundary_conditions/velocity_inlet` | +| Create a new named object | `POST /settings/create?path=...&name=wall-1` | +| Delete a named object | `DELETE /settings/object?path=...&name=wall-1` | +| Rename a named object | `PATCH /settings/rename?path=...` + body `{"old": "wall-1", "new": "wall-2"}` | +| Count items in a list | `GET /settings/list-size?path=...` | +| Run a command (e.g. initialise) | `POST /settings/commands/initialize?path=solution/initialization` | +| Run a query (e.g. get zone names) | `POST /settings/queries/get_zone_names?path=...` | +| Get attribute metadata | `GET /settings/attrs?path=...&attrs=allowed-values` | + +All responses come back as **JSON**. + +> **Note:** This is a *provisional* contract designed to match the shape of +> Fluent's gRPC settings API. 
When Ansys publishes the official Fluent REST +> API spec, only the endpoint paths in `client.py` need updating — the rest of +> PyFluent stays the same. + +### 3. The Mock Server + +Because the real Fluent REST API does not exist yet, `FluentRestMockServer` +acts as a stand-in. It: + +- Runs in a background thread inside the same Python process. +- Stores all settings in a Python dictionary (in memory). +- Comes pre-loaded with a small but realistic set of solver settings. +- Starts on a random free port so multiple tests can run at the same time without + clashing. + +### 4. The flobject Connection (Why This Matters) + +PyFluent's settings system is built around a module called **flobject**. When +you write: + +```python +solver.settings.setup.models.energy.enabled = True +``` + +`flobject` is the code that makes `solver.settings` feel like a real Python +object tree. Under the hood it calls through a **proxy** object. + +Currently that proxy is `SettingsService` (the gRPC one). But `flobject` does +not care *how* the proxy works — it just calls methods like `get_var`, +`set_var`, `execute_cmd`, etc. + +`FluentRestClient` has **exactly the same method signatures**, so in Step 2 of +this project it can be dropped in as the proxy directly: + +```python +# Today (gRPC) +root = flobject.get_root(flproxy=grpc_settings_service, ...) + +# Tomorrow (REST) — one line change +root = flobject.get_root(flproxy=FluentRestClient("http://localhost:8000"), ...) +``` + +No changes to `flobject` at all. 
+ +--- + +## Quick Start + +```python +from ansys.fluent.core.rest import FluentRestClient, FluentRestMockServer + +# Start a fake Fluent server (for demo/testing) +server = FluentRestMockServer() +server.start() + +# Connect a client +client = FluentRestClient(server.base_url) + +# Read a setting +print(client.get_var("setup/models/energy/enabled")) # True + +# Change a setting +client.set_var("setup/models/energy/enabled", False) +print(client.get_var("setup/models/energy/enabled")) # False + +# List boundary conditions +print(client.get_object_names("setup/boundary_conditions/velocity_inlet")) +# ['inlet'] + +# Create a new wall boundary +client.create("setup/boundary_conditions/wall", "wall-1") + +# Run a command +reply = client.execute_cmd("solution/initialization", "initialize") +print(reply) # 'Initialization complete' + +# Check the full settings tree structure +info = client.get_static_info() +print(info["type"]) # 'group' +print(list(info["children"])) # ['setup', 'solution'] + +# Stop the server when done +server.stop() +``` + +### Use as a context manager (recommended) + +```python +with FluentRestMockServer() as server: + client = FluentRestClient(server.base_url) + print(client.get_var("solution/run_calculation/iter_count")) # 100 +# Server is automatically stopped here +``` + +### Pointing at a real server + +When the real Fluent REST server is available, just change the URL: + +```python +client = FluentRestClient("http://my-fluent-machine:8000", auth_token="my-token") +``` + +Everything else stays the same. + +--- + +## Running the Tests + +From the `pyfluent/` directory: + +```bash +pytest src/ansys/fluent/core/rest/tests/ -v +``` + +No Fluent installation needed. All 40 tests run against the in-memory mock +server. 
+ +What the tests cover: + +| Test class | What it checks | +|---|---| +| `TestMockServer` | Server lifecycle (start, stop, context manager, independent state) | +| `TestGetStaticInfo` | Settings tree structure returned correctly | +| `TestGetSetVar` | Read/write all value types (bool, string, int, float, dict, list) | +| `TestGetAttrs` | Attribute metadata (allowed values, active flag) | +| `TestNamedObjects` | Create, list, delete, rename named objects | +| `TestListSize` | List-object size queries | +| `TestExecuteCmd` | Command execution (registered + unregistered) | +| `TestExecuteQuery` | Query execution (registered + unregistered) | +| `TestHelpers` | `is_interactive_mode()`, `has_wildcard()` | +| `TestFluentRestError` | Error representation and status codes | + +--- + +## No Extra Dependencies + +Both `FluentRestClient` and `FluentRestMockServer` use **only Python's standard +library**: + +| Need | Module used | +|---|---| +| HTTP client | `urllib.request`, `urllib.parse`, `urllib.error` | +| HTTP server | `http.server`, `socketserver` | +| Background thread | `threading` | +| JSON | `json` | + +Nothing to `pip install` beyond what PyFluent already requires. + +--- + +## Key Design Decisions + +| Decision | Reason | +|---|---| +| Endpoint paths are in one `_Endpoints` class | Easy to update when the real Fluent REST spec arrives | +| `FluentRestClient` method names match the gRPC `SettingsService` | Drop-in replacement for `flobject` in Step 2 | +| Mock server uses random port by default | Tests can run in parallel without port conflicts | +| Each mock server instance has its own store (deep copy) | Tests are fully isolated from each other | +| `has_wildcard()` runs locally (no HTTP call) | Simple string check — no need to ask the server | +| `is_interactive_mode()` always returns `False` | REST is non-interactive by nature | + +--- + +## What Comes Next (Step 2) + +Step 1 (this folder) proved the REST client works in isolation. 
+ +Step 2 will wire it into the full PyFluent stack: + +1. **`my-simple-launcher`** — a tiny launcher that connects to a REST-enabled + Fluent instead of starting gRPC. +2. **`my-session-class`** — a lightweight session that holds a + `FluentRestClient` instead of a `SettingsService`. +3. **`flobject` unchanged** — pass `FluentRestClient` as `flproxy` and the + entire `solver.settings` tree works transparently over REST. + +The end result: one line of code changes the transport from gRPC to REST. The +user never needs to know which one is running underneath. diff --git a/src/ansys/fluent/core/rest/__init__.py b/src/ansys/fluent/core/rest/__init__.py new file mode 100644 index 000000000000..44fdb56d1669 --- /dev/null +++ b/src/ansys/fluent/core/rest/__init__.py @@ -0,0 +1,44 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +"""REST-based PyFluent settings client (Step 1 exploration). + +This package provides a transport-agnostic alternative to the gRPC +``SettingsService``. It contains: + +* :class:`~ansys.fluent.core.rest.client.FluentRestClient` – a pure-Python + HTTP client whose public interface is identical to the duck-typed proxy + expected by :mod:`~ansys.fluent.core.solver.flobject`. Written against a + provisional REST API contract; the contract is documented in ``client.py`` + and can be adjusted to match the real Fluent REST API when it becomes + available. + +* :class:`~ansys.fluent.core.rest.mock_server.FluentRestMockServer` – a + lightweight in-process HTTP server (stdlib only, no Flask) that implements + the same provisional REST contract backed by an in-memory settings store. + Useful for local development, unit-tests, and demos without a running Fluent + instance. +""" + +from ansys.fluent.core.rest.client import FluentRestClient +from ansys.fluent.core.rest.mock_server import FluentRestMockServer + +__all__ = ["FluentRestClient", "FluentRestMockServer"] diff --git a/src/ansys/fluent/core/rest/client.py b/src/ansys/fluent/core/rest/client.py new file mode 100644 index 000000000000..5b30bd9b42fe --- /dev/null +++ b/src/ansys/fluent/core/rest/client.py @@ -0,0 +1,388 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Pure-Python REST client for Fluent solver settings. + +Provisional REST API Contract +------------------------------ +All endpoints share the base URL ``/settings``. JSON is used for +both request bodies and response payloads. When a real Fluent REST API is +published, only the constants in :data:`_Endpoints` and the helper +:meth:`FluentRestClient._request` need updating. + +Endpoint summary +~~~~~~~~~~~~~~~~ + +.. code-block:: text + + GET /settings/static-info + → { "info": } + + GET /settings/var?path= + → { "value": } + + PUT /settings/var?path= + body: { "value": } + → {} + + GET /settings/attrs?path=&attrs=&attrs=[&recursive=true] + → { "attrs": , "group_children": {...} } (group_children + only present when recursive=true) + + GET /settings/object-names?path= + → { "names": [, ...] } + + POST /settings/create?path=&name= + → {} + + DELETE /settings/object?path=&name= + → {} + + PATCH /settings/rename?path= + body: { "new": , "old": } + → {} + + GET /settings/list-size?path= + → { "size": } + + POST /settings/commands/?path= + body: { : , ... } + → { "reply": } + + POST /settings/queries/?path= + body: { : , ... } + → { "reply": } + +Authentication +~~~~~~~~~~~~~~ +When *auth_token* is supplied, every request carries the header:: + + Authorization: Bearer + +Error handling +~~~~~~~~~~~~~~ +HTTP 4xx / 5xx responses raise :class:`FluentRestError`. 
+""" + +import json +from typing import Any +import urllib.error +import urllib.parse +import urllib.request + + +class FluentRestError(RuntimeError): + """Raised when the Fluent REST server returns an error response. + + Parameters + ---------- + status : int + HTTP status code. + message : str + Error detail from the response body, or the raw reason phrase. + """ + + def __init__(self, status: int, message: str) -> None: + self.status = status + super().__init__(f"HTTP {status}: {message}") + + +class _Endpoints: + """Centralised endpoint paths – update here when the real spec ships.""" + + BASE = "settings" + STATIC_INFO = "settings/static-info" + VAR = "settings/var" + ATTRS = "settings/attrs" + OBJECT_NAMES = "settings/object-names" + CREATE = "settings/create" + DELETE = "settings/object" + RENAME = "settings/rename" + LIST_SIZE = "settings/list-size" + COMMANDS = "settings/commands" + QUERIES = "settings/queries" + + +class FluentRestClient: + """Pure-Python HTTP client for Fluent solver settings. + + The public method signatures are intentionally identical to the duck-typed + *flproxy* interface consumed by + :func:`~ansys.fluent.core.solver.flobject.get_root`, so this client can be + passed directly as *flproxy* in Step 2 of the componentisation work. + + Parameters + ---------- + base_url : str + Root URL of the Fluent REST server, e.g. ``"http://localhost:8000"``. + A trailing slash is stripped automatically. + auth_token : str, optional + Bearer token added to every request as ``Authorization: Bearer …``. + timeout : float, optional + Socket timeout in seconds for every request. Defaults to ``30.0``. 
+ + Examples + -------- + >>> from ansys.fluent.core.rest import FluentRestClient, FluentRestMockServer + >>> server = FluentRestMockServer() + >>> server.start() + >>> client = FluentRestClient(f"http://localhost:{server.port}") + >>> client.get_var("setup/models/energy/enabled") + True + >>> client.set_var("setup/models/energy/enabled", False) + >>> server.stop() + """ + + def __init__( + self, + base_url: str, + *, + auth_token: str | None = None, + timeout: float = 30.0, + ) -> None: + parsed = urllib.parse.urlparse(base_url) + if parsed.scheme not in {"http", "https"}: + raise ValueError("base_url scheme must be http or https") + if not parsed.netloc: + raise ValueError("base_url must include host") + self._base_url = base_url.rstrip("/") + self._auth_token = auth_token + self._timeout = timeout + + # ------------------------------------------------------------------ + # Internal helpers + # ------------------------------------------------------------------ + + def _url(self, endpoint: str, **query_params) -> str: + """Build a full URL from *endpoint* and optional query params.""" + url = f"{self._base_url}/{endpoint}" + # urllib.parse.urlencode does not support multi-value keys natively + # when passed a dict, but doseq=True handles list values. + if query_params: + # Convert single values to strings; keep lists as-is for doseq. + encoded = urllib.parse.urlencode( + {k: v for k, v in query_params.items() if v is not None}, + doseq=True, + ) + url = f"{url}?{encoded}" + return url + + def _request( + self, + method: str, + endpoint: str, + *, + query_params: dict | None = None, + body: Any = None, + ) -> Any: + """Send an HTTP request and return the decoded JSON response body. + + Parameters + ---------- + method : str + HTTP verb (``"GET"``, ``"PUT"``, ``"POST"``, ``"PATCH"``, + ``"DELETE"``). + endpoint : str + Path relative to *base_url*, e.g. ``"settings/var"``. + query_params : dict, optional + Mapping of URL query parameters. 
List values produce repeated + keys (``?attrs=a&attrs=b``). + body : any JSON-serialisable object, optional + Request body; encoded as UTF-8 JSON. + + Returns + ------- + dict + Decoded JSON response, or ``{}`` for empty 2xx bodies. + + Raises + ------ + FluentRestError + For any HTTP 4xx or 5xx response. + """ + url = self._url(endpoint, **(query_params or {})) + data: bytes | None = None + headers: dict[str, str] = {} + + if body is not None: + data = json.dumps(body).encode("utf-8") + headers["Content-Type"] = "application/json" + + if self._auth_token: + headers["Authorization"] = f"Bearer {self._auth_token}" + + req = urllib.request.Request( + url, data=data, headers=headers, method=method.upper() + ) + try: + with urllib.request.urlopen( + req, timeout=self._timeout + ) as resp: # nosec B310 + raw = resp.read() + return json.loads(raw) if raw.strip() else {} + except urllib.error.HTTPError as exc: + try: + detail = json.loads(exc.read()).get("error", exc.reason) + except Exception: + detail = exc.reason + raise FluentRestError(exc.code, detail) from exc + + # ------------------------------------------------------------------ + # flobject proxy interface + # ------------------------------------------------------------------ + + def get_static_info(self) -> dict[str, Any]: + """Return the full static-info tree for all solver settings. + + Corresponds to ``GET /settings/static-info``. + """ + return self._request("GET", _Endpoints.STATIC_INFO)["info"] + + def get_var(self, path: str) -> Any: + """Return the current value of the setting at *path*. + + Corresponds to ``GET /settings/var?path=``. + """ + return self._request("GET", _Endpoints.VAR, query_params={"path": path})[ + "value" + ] + + def set_var(self, path: str, value: Any) -> None: + """Set the value of the setting at *path*. + + Corresponds to ``PUT /settings/var?path=`` with body + ``{"value": }``. 
+ """ + self._request( + "PUT", + _Endpoints.VAR, + query_params={"path": path}, + body={"value": value}, + ) + + def get_attrs(self, path: str, attrs: list[str], recursive: bool = False) -> Any: + """Return the requested attributes for the setting at *path*. + + Corresponds to + ``GET /settings/attrs?path=&attrs=&attrs=[&recursive=true]``. + """ + return self._request( + "GET", + _Endpoints.ATTRS, + query_params={ + "path": path, + "attrs": attrs, + "recursive": str(recursive).lower(), + }, + ) + + def get_object_names(self, path: str) -> list[str]: + """Return the child named-object names at *path*. + + Corresponds to ``GET /settings/object-names?path=``. + """ + return self._request( + "GET", _Endpoints.OBJECT_NAMES, query_params={"path": path} + )["names"] + + def create(self, path: str, name: str) -> None: + """Create a named child object at *path*. + + Corresponds to ``POST /settings/create?path=&name=``. + """ + self._request( + "POST", _Endpoints.CREATE, query_params={"path": path, "name": name} + ) + + def delete(self, path: str, name: str) -> None: + """Delete the named child object at *path*. + + Corresponds to ``DELETE /settings/object?path=&name=``. + """ + self._request( + "DELETE", _Endpoints.DELETE, query_params={"path": path, "name": name} + ) + + def rename(self, path: str, new: str, old: str) -> None: + """Rename a child object at *path* from *old* to *new*. + + Corresponds to ``PATCH /settings/rename?path=`` with body + ``{"new": , "old": }``. + """ + self._request( + "PATCH", + _Endpoints.RENAME, + query_params={"path": path}, + body={"new": new, "old": old}, + ) + + def get_list_size(self, path: str) -> int: + """Return the number of elements in the list-object at *path*. + + Corresponds to ``GET /settings/list-size?path=``. 
+ """ + return self._request("GET", _Endpoints.LIST_SIZE, query_params={"path": path})[ + "size" + ] + + def execute_cmd(self, path: str, command: str, **kwds) -> Any: + """Execute *command* at *path* with keyword arguments *kwds*. + + Corresponds to + ``POST /settings/commands/?path=`` with body + ``{: , ...}``. + """ + return self._request( + "POST", + f"{_Endpoints.COMMANDS}/{command}", + query_params={"path": path}, + body=kwds, + ).get("reply") + + def execute_query(self, path: str, query: str, **kwds) -> Any: + """Execute *query* at *path* with keyword arguments *kwds*. + + Corresponds to + ``POST /settings/queries/?path=`` with body + ``{: , ...}``. + """ + return self._request( + "POST", + f"{_Endpoints.QUERIES}/{query}", + query_params={"path": path}, + body=kwds, + ).get("reply") + + # ------------------------------------------------------------------ + # Additional proxy interface helpers (no server round-trip required) + # ------------------------------------------------------------------ + + def has_wildcard(self, name: str) -> bool: + """Return ``True`` if *name* contains an ``fnmatch``-style wildcard. + + Recognised wildcard characters: ``*``, ``?``, ``[``. + Performs the check locally – no server round-trip required. + """ + return any(c in name for c in ("*", "?", "[")) + + def is_interactive_mode(self) -> bool: + """Always returns ``False`` for a REST client.""" + return False diff --git a/src/ansys/fluent/core/rest/mock_server.py b/src/ansys/fluent/core/rest/mock_server.py new file mode 100644 index 000000000000..f2a10e9cea13 --- /dev/null +++ b/src/ansys/fluent/core/rest/mock_server.py @@ -0,0 +1,585 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. 
+# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Lightweight in-process HTTP mock server for the provisional Fluent REST +settings API. + +Uses only the Python standard library (``http.server``, ``threading``, +``socketserver``). No Flask or any external packages are required. + +The server is backed by an in-memory *settings store* pre-populated with a +realistic slice of Fluent solver settings. It is intended for: + +* Unit-testing :class:`~ansys.fluent.core.rest.client.FluentRestClient` + without a running Fluent instance. +* Local development and demos. +* Acting as a reference implementation of the provisional REST contract. 
+ +Usage +----- +:: + + from ansys.fluent.core.rest import FluentRestMockServer, FluentRestClient + + server = FluentRestMockServer() + server.start() # starts in a background thread + + client = FluentRestClient(f"http://localhost:{server.port}") + print(client.get_var("setup/models/energy/enabled")) # True + + server.stop() + +Pytest fixture +-------------- +:: + + import pytest + from ansys.fluent.core.rest import FluentRestMockServer, FluentRestClient + + @pytest.fixture() + def rest_client(): + server = FluentRestMockServer() + server.start() + yield FluentRestClient(f"http://localhost:{server.port}") + server.stop() +""" + +import copy +from http.server import BaseHTTPRequestHandler +import json +import socketserver +import threading +from typing import Any +import urllib.parse + +# --------------------------------------------------------------------------- +# Pre-populated settings store +# --------------------------------------------------------------------------- + +#: Default in-memory settings tree. Keys are slash-delimited Fluent paths. 
+_DEFAULT_VARS: dict[str, Any] = { + # General solver settings + "setup/general/solver/time": "steady", + "setup/general/solver/velocity_formulation": "absolute", + "setup/general/gravity/enabled": False, + # Energy model + "setup/models/energy/enabled": True, + # Viscous model + "setup/models/viscous/model": "k-epsilon", + "setup/models/viscous/k_epsilon_model": "standard", + # Boundary conditions – velocity inlet + "setup/boundary_conditions/velocity_inlet/inlet/momentum/velocity_magnitude/value": 1.0, + "setup/boundary_conditions/velocity_inlet/inlet/momentum/velocity_magnitude/units": "m/s", + # Boundary conditions – pressure outlet + "setup/boundary_conditions/pressure_outlet/outlet/momentum/gauge_pressure/value": 0.0, + # Solution controls + "solution/methods/p_v_coupling/scheme": "simple", + "solution/controls/under_relaxation/pressure": 0.3, + "solution/controls/under_relaxation/velocity": 0.7, + "solution/run_calculation/iter_count": 100, + "solution/initialization/initialization_methods": "standard", +} + +#: Named-object children for specific paths. +_DEFAULT_NAMED_OBJECTS: dict[str, list[str]] = { + "setup/boundary_conditions/velocity_inlet": ["inlet"], + "setup/boundary_conditions/pressure_outlet": ["outlet"], + "setup/models": [], +} + +#: List sizes for list-object paths. +_DEFAULT_LIST_SIZES: dict[str, int] = { + "solution/run_calculation/pseudo_time_settings/timestepping_parameters/profile_update_interval": 1, +} + +#: Attribute responses keyed by path. +#: Each value is a dict with optional keys ``attrs``, ``group_children``. 
+_DEFAULT_ATTRS: dict[str, dict] = { + "setup/models/energy/enabled": { + "attrs": {"allowed-values": [True, False], "active?": True}, + }, + "setup/models/viscous/model": { + "attrs": { + "allowed-values": ["laminar", "k-epsilon", "k-omega", "RSM"], + "active?": True, + }, + }, + "setup/general/solver/time": { + "attrs": { + "allowed-values": ["steady", "transient"], + "active?": True, + }, + }, +} + +#: Static info – a minimal subset of the full Fluent settings tree. +_STATIC_INFO: dict[str, Any] = { + "type": "group", + "children": { + "setup": { + "type": "group", + "children": { + "general": { + "type": "group", + "children": { + "solver": { + "type": "group", + "children": { + "time": {"type": "string"}, + "velocity_formulation": {"type": "string"}, + }, + }, + "gravity": { + "type": "group", + "children": {"enabled": {"type": "boolean"}}, + }, + }, + }, + "models": { + "type": "group", + "children": { + "energy": { + "type": "group", + "children": {"enabled": {"type": "boolean"}}, + }, + "viscous": { + "type": "group", + "children": { + "model": {"type": "string"}, + "k_epsilon_model": {"type": "string"}, + }, + }, + }, + }, + "boundary_conditions": { + "type": "group", + "children": { + "velocity_inlet": { + "type": "named-object", + "object-type": { + "type": "group", + "children": { + "momentum": { + "type": "group", + "children": { + "velocity_magnitude": { + "type": "group", + "children": { + "value": {"type": "real"}, + "units": {"type": "string"}, + }, + } + }, + } + }, + }, + }, + "pressure_outlet": { + "type": "named-object", + "object-type": { + "type": "group", + "children": { + "momentum": { + "type": "group", + "children": { + "gauge_pressure": { + "type": "group", + "children": { + "value": {"type": "real"}, + }, + } + }, + } + }, + }, + }, + }, + }, + }, + }, + "solution": { + "type": "group", + "children": { + "methods": { + "type": "group", + "children": { + "p_v_coupling": { + "type": "group", + "children": {"scheme": {"type": "string"}}, + 
} + }, + }, + "controls": { + "type": "group", + "children": { + "under_relaxation": { + "type": "group", + "children": { + "pressure": {"type": "real"}, + "velocity": {"type": "real"}, + }, + } + }, + }, + "run_calculation": { + "type": "group", + "children": {"iter_count": {"type": "integer"}}, + }, + "initialization": { + "type": "group", + "children": { + "initialization_methods": {"type": "string"}, + }, + "commands": { + "initialize": { + "type": "command", + "arguments": {}, + } + }, + }, + }, + }, + }, +} + +#: Command handlers: (path, command) → callable(store, **kwargs) → reply +_COMMAND_HANDLERS: dict[tuple[str, str], Any] = { + ( + "solution/initialization", + "initialize", + ): lambda store, **kw: "Initialization complete", +} + +#: Query handlers: (path, query) → callable(store, **kwargs) → reply +_QUERY_HANDLERS: dict[tuple[str, str], Any] = { + ( + "setup/boundary_conditions/velocity_inlet", + "get_zone_names", + ): lambda store, **kw: list( + store["named_objects"].get("setup/boundary_conditions/velocity_inlet", []) + ), +} + + +# --------------------------------------------------------------------------- +# HTTP request handler +# --------------------------------------------------------------------------- + + +class _Handler(BaseHTTPRequestHandler): + """HTTP request handler implementing the provisional REST contract.""" + + # Suppress default request logging to keep test output clean. 
+ def log_message(self, format, *args): # noqa: A002 + pass + + # -- helpers -------------------------------------------------------- + + def _parse_url(self): + parsed = urllib.parse.urlparse(self.path) + params = urllib.parse.parse_qs(parsed.query, keep_blank_values=True) + # Flatten single-value params; keep lists for multi-value params + flat = {k: (v[0] if len(v) == 1 else v) for k, v in params.items()} + return parsed.path.lstrip("/"), flat + + def _read_body(self) -> dict: + length = int(self.headers.get("Content-Length", 0)) + if length: + return json.loads(self.rfile.read(length)) + return {} + + def _send_json(self, data: Any, status: int = 200) -> None: + body = json.dumps(data).encode("utf-8") + self.send_response(status) + self.send_header("Content-Type", "application/json") + self.send_header("Content-Length", str(len(body))) + self.end_headers() + self.wfile.write(body) + + def _send_error(self, status: int, message: str) -> None: + self._send_json({"error": message}, status) + + @property + def _store(self) -> dict: + return self.server.store # type: ignore[attr-defined] + + # -- GET ------------------------------------------------------------ + + def do_GET(self): # noqa: N802 + """Handle HTTP GET requests for REST settings endpoints.""" + path, params = self._parse_url() + + if path == "settings/static-info": + self._send_json({"info": self._store["static_info"]}) + + elif path == "settings/var": + setting_path = params.get("path") + if setting_path is None: + return self._send_error(400, "Missing 'path' parameter") + if setting_path not in self._store["vars"]: + return self._send_error(404, f"Path not found: {setting_path}") + self._send_json({"value": self._store["vars"][setting_path]}) + + elif path == "settings/attrs": + setting_path = params.get("path") + if setting_path is None: + return self._send_error(400, "Missing 'path' parameter") + recursive = params.get("recursive", "false").lower() == "true" + entry = 
self._store["attrs"].get(setting_path, {"attrs": {}}) + if recursive: + self._send_json(entry) + else: + self._send_json({"attrs": entry.get("attrs", {})}) + + elif path == "settings/object-names": + setting_path = params.get("path") + if setting_path is None: + return self._send_error(400, "Missing 'path' parameter") + names = self._store["named_objects"].get(setting_path, []) + self._send_json({"names": names}) + + elif path == "settings/list-size": + setting_path = params.get("path") + if setting_path is None: + return self._send_error(400, "Missing 'path' parameter") + size = self._store["list_sizes"].get(setting_path, 0) + self._send_json({"size": size}) + + else: + self._send_error(404, f"Unknown endpoint: {path}") + + # -- PUT ------------------------------------------------------------ + + def do_PUT(self): # noqa: N802 + """Handle HTTP PUT requests for REST settings endpoints.""" + path, params = self._parse_url() + body = self._read_body() + + if path == "settings/var": + setting_path = params.get("path") + if setting_path is None: + return self._send_error(400, "Missing 'path' parameter") + if "value" not in body: + return self._send_error(400, "Missing 'value' in request body") + self._store["vars"][setting_path] = body["value"] + self._send_json({}) + + else: + self._send_error(404, f"Unknown endpoint: {path}") + + # -- POST ----------------------------------------------------------- + + def do_POST(self): # noqa: N802 + """Handle HTTP POST requests for REST settings endpoints.""" + path, params = self._parse_url() + body = self._read_body() + + if path == "settings/create": + setting_path = params.get("path") + name = params.get("name") + if not setting_path or not name: + return self._send_error(400, "Missing 'path' or 'name' parameter") + bucket = self._store["named_objects"].setdefault(setting_path, []) + if name not in bucket: + bucket.append(name) + self._send_json({}) + + elif path.startswith("settings/commands/"): + command = 
path[len("settings/commands/") :] + setting_path = params.get("path", "") + handler = self._store["command_handlers"].get((setting_path, command)) + if handler is None: + # Generic fallback: echo the command name + reply = f"Executed command '{command}' at path '{setting_path}'" + else: + reply = handler(self._store, **body) + self._send_json({"reply": reply}) + + elif path.startswith("settings/queries/"): + query = path[len("settings/queries/") :] + setting_path = params.get("path", "") + handler = self._store["query_handlers"].get((setting_path, query)) + if handler is None: + reply = f"Query '{query}' at path '{setting_path}' returned no data" + else: + reply = handler(self._store, **body) + self._send_json({"reply": reply}) + + else: + self._send_error(404, f"Unknown endpoint: {path}") + + # -- DELETE --------------------------------------------------------- + + def do_DELETE(self): # noqa: N802 + """Handle HTTP DELETE requests for REST settings endpoints.""" + path, params = self._parse_url() + + if path == "settings/object": + setting_path = params.get("path") + name = params.get("name") + if not setting_path or not name: + return self._send_error(400, "Missing 'path' or 'name' parameter") + bucket = self._store["named_objects"].get(setting_path, []) + if name not in bucket: + return self._send_error( + 404, f"Object '{name}' not found at path '{setting_path}'" + ) + bucket.remove(name) + self._send_json({}) + + else: + self._send_error(404, f"Unknown endpoint: {path}") + + # -- PATCH ---------------------------------------------------------- + + def do_PATCH(self): # noqa: N802 + """Handle HTTP PATCH requests for REST settings endpoints.""" + path, params = self._parse_url() + body = self._read_body() + + if path == "settings/rename": + setting_path = params.get("path") + new_name = body.get("new") + old_name = body.get("old") + if not setting_path or not new_name or not old_name: + return self._send_error( + 400, "Missing 'path', 'new', or 'old' parameter" 
+ ) + bucket = self._store["named_objects"].get(setting_path, []) + if old_name not in bucket: + return self._send_error( + 404, f"Object '{old_name}' not found at path '{setting_path}'" + ) + idx = bucket.index(old_name) + bucket[idx] = new_name + self._send_json({}) + + else: + self._send_error(404, f"Unknown endpoint: {path}") + + +# --------------------------------------------------------------------------- +# Server class +# --------------------------------------------------------------------------- + + +class FluentRestMockServer: + """In-process HTTP mock server for the provisional Fluent REST settings API. + + The server runs in a background daemon thread and can be started and stopped + programmatically. The in-memory settings store is a deep copy of the + module-level defaults so each server instance starts with a clean state. + + Parameters + ---------- + port : int, optional + TCP port to listen on. Defaults to ``0``, which lets the OS assign a + free ephemeral port (recommended for tests to avoid port conflicts). + The actual port is available via :attr:`port` after :meth:`start`. + host : str, optional + Hostname/IP to bind to. Defaults to ``"127.0.0.1"``. + + Examples + -------- + >>> server = FluentRestMockServer() + >>> server.start() + >>> print(server.port) # OS-assigned port + >>> server.stop() + """ + + def __init__(self, port: int = 0, host: str = "127.0.0.1") -> None: + self._host = host + self._port = port + self._httpd: socketserver.TCPServer | None = None + self._thread: threading.Thread | None = None + + # Build a fresh deep-copy of the default store. 
+ self.store: dict[str, Any] = { + "vars": copy.deepcopy(_DEFAULT_VARS), + "named_objects": copy.deepcopy(_DEFAULT_NAMED_OBJECTS), + "list_sizes": copy.deepcopy(_DEFAULT_LIST_SIZES), + "attrs": copy.deepcopy(_DEFAULT_ATTRS), + "static_info": copy.deepcopy(_STATIC_INFO), + "command_handlers": dict(_COMMAND_HANDLERS), + "query_handlers": dict(_QUERY_HANDLERS), + } + + @property + def port(self) -> int: + """The TCP port the server is listening on. + + Valid only after :meth:`start` has been called. + """ + if self._httpd is None: + return self._port + return self._httpd.server_address[1] + + @property + def base_url(self) -> str: + """Convenience base URL, e.g. ``"http://127.0.0.1:54321"``.""" + return f"http://{self._host}:{self.port}" + + def start(self) -> "FluentRestMockServer": + """Start the server in a background daemon thread. + + Returns *self* to allow chaining:: + + client = FluentRestClient(FluentRestMockServer().start().base_url) + + Raises + ------ + RuntimeError + If the server is already running. + """ + if self._httpd is not None: + raise RuntimeError("Server is already running.") + + # Allow port reuse so tests can restart quickly. + socketserver.TCPServer.allow_reuse_address = True + httpd = socketserver.TCPServer((self._host, self._port), _Handler) + # Inject the store reference into the server so handlers can access it. 
+ httpd.store = self.store # type: ignore[attr-defined] + self._httpd = httpd + + self._thread = threading.Thread( + target=httpd.serve_forever, daemon=True, name="FluentRestMockServer" + ) + self._thread.start() + return self + + def stop(self) -> None: + """Shut down the server and wait for the background thread to finish.""" + if self._httpd is None: + return + self._httpd.shutdown() + self._httpd.server_close() + if self._thread is not None: + self._thread.join(timeout=5) + self._httpd = None + self._thread = None + + # Context-manager support ---------------------------------------- + + def __enter__(self) -> "FluentRestMockServer": + return self.start() + + def __exit__(self, *_) -> None: + self.stop() diff --git a/src/ansys/fluent/core/rest/tests/__init__.py b/src/ansys/fluent/core/rest/tests/__init__.py new file mode 100644 index 000000000000..015821eebcc2 --- /dev/null +++ b/src/ansys/fluent/core/rest/tests/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Tests for the REST settings transport layer (Step 1 exploration).""" diff --git a/src/ansys/fluent/core/rest/tests/conftest.py b/src/ansys/fluent/core/rest/tests/conftest.py new file mode 100644 index 000000000000..5c694a6b60fd --- /dev/null +++ b/src/ansys/fluent/core/rest/tests/conftest.py @@ -0,0 +1,39 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +"""Shared pytest fixtures for the REST transport tests.""" + +import pytest + +from ansys.fluent.core.rest import FluentRestClient, FluentRestMockServer + + +@pytest.fixture(scope="module") +def rest_server(): + """Start a single mock-server instance shared across all tests in a module.""" + with FluentRestMockServer() as srv: + yield srv + + +@pytest.fixture(scope="module") +def rest_client(rest_server): + """Return a FluentRestClient pointed at the module-scoped mock server.""" + return FluentRestClient(rest_server.base_url) diff --git a/src/ansys/fluent/core/rest/tests/test_rest_client.py b/src/ansys/fluent/core/rest/tests/test_rest_client.py new file mode 100644 index 000000000000..4c05cc4b48b6 --- /dev/null +++ b/src/ansys/fluent/core/rest/tests/test_rest_client.py @@ -0,0 +1,324 @@ +# Copyright (C) 2021 - 2026 ANSYS, Inc. and/or its affiliates. +# SPDX-License-Identifier: MIT +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +"""Tests for the REST settings client and mock server (Step 1 exploration). + +All REST transport components live under +``src/ansys/fluent/core/rest/``. These tests run entirely in-process +with no Fluent instance required. + +Run with:: + + pytest src/ansys/fluent/core/rest/tests/ -v +""" + +# pylint: disable=missing-class-docstring,missing-function-docstring + +import pytest + +from ansys.fluent.core.rest import FluentRestClient, FluentRestMockServer +from ansys.fluent.core.rest.client import FluentRestError + +# --------------------------------------------------------------------------- +# FluentRestMockServer tests +# --------------------------------------------------------------------------- + + +class TestMockServer: + def test_server_starts_and_stops(self): + """Server can be started, queried, and stopped cleanly.""" + srv = FluentRestMockServer() + srv.start() + assert srv.port > 0 + assert srv.base_url.startswith("http://127.0.0.1:") + srv.stop() + assert srv._httpd is None + + def test_context_manager(self): + """Server supports the context-manager protocol.""" + with FluentRestMockServer() as srv: + assert srv.port > 0 + assert srv._httpd is None + + def test_start_twice_raises(self): + with FluentRestMockServer() as srv: + with pytest.raises(RuntimeError, match="already running"): + srv.start() + + def test_each_instance_has_independent_store(self): + """Two server instances do not share state.""" + with FluentRestMockServer() as srv1, FluentRestMockServer() as srv2: + c1 = FluentRestClient(srv1.base_url) + c2 = FluentRestClient(srv2.base_url) + c1.set_var("setup/models/energy/enabled", False) + # srv2 should still have the default True + assert c2.get_var("setup/models/energy/enabled") is True + + +# --------------------------------------------------------------------------- +# get_static_info +# --------------------------------------------------------------------------- + + +class TestGetStaticInfo: + def test_returns_dict(self, rest_client): + 
info = rest_client.get_static_info() + assert isinstance(info, dict) + assert info["type"] == "group" + + def test_top_level_children(self, rest_client): + info = rest_client.get_static_info() + assert "setup" in info["children"] + assert "solution" in info["children"] + + def test_nested_energy_node(self, rest_client): + info = rest_client.get_static_info() + energy = info["children"]["setup"]["children"]["models"]["children"]["energy"] + assert energy["children"]["enabled"]["type"] == "boolean" + + +# --------------------------------------------------------------------------- +# get_var / set_var +# --------------------------------------------------------------------------- + + +class TestGetSetVar: + def test_get_existing_bool(self, rest_client): + assert rest_client.get_var("setup/models/energy/enabled") is True + + def test_get_existing_string(self, rest_client): + assert rest_client.get_var("setup/general/solver/time") == "steady" + + def test_get_existing_int(self, rest_client): + assert rest_client.get_var("solution/run_calculation/iter_count") == 100 + + def test_get_existing_float(self, rest_client): + val = rest_client.get_var( + "setup/boundary_conditions/velocity_inlet/inlet/momentum/velocity_magnitude/value" + ) + assert val == pytest.approx(1.0) + + def test_get_unknown_path_raises(self, rest_client): + with pytest.raises(FluentRestError) as exc_info: + rest_client.get_var("nonexistent/path") + assert exc_info.value.status == 404 + + def test_set_then_get_bool(self, rest_client): + rest_client.set_var("setup/models/energy/enabled", False) + assert rest_client.get_var("setup/models/energy/enabled") is False + # Restore + rest_client.set_var("setup/models/energy/enabled", True) + + def test_set_then_get_string(self, rest_client): + rest_client.set_var("setup/general/solver/time", "transient") + assert rest_client.get_var("setup/general/solver/time") == "transient" + rest_client.set_var("setup/general/solver/time", "steady") + + def 
test_set_then_get_float(self, rest_client): + rest_client.set_var("solution/controls/under_relaxation/pressure", 0.5) + assert rest_client.get_var( + "solution/controls/under_relaxation/pressure" + ) == pytest.approx(0.5) + rest_client.set_var("solution/controls/under_relaxation/pressure", 0.3) + + def test_set_creates_new_path(self, rest_client): + """set_var should accept new paths (no pre-population required).""" + rest_client.set_var("setup/new/custom/setting", 42) + assert rest_client.get_var("setup/new/custom/setting") == 42 + + def test_set_dict_value(self, rest_client): + rest_client.set_var("setup/new/dict/setting", {"key": "val"}) + assert rest_client.get_var("setup/new/dict/setting") == {"key": "val"} + + def test_set_list_value(self, rest_client): + rest_client.set_var("setup/new/list/setting", [1, 2, 3]) + assert rest_client.get_var("setup/new/list/setting") == [1, 2, 3] + + +# --------------------------------------------------------------------------- +# get_attrs +# --------------------------------------------------------------------------- + + +class TestGetAttrs: + def test_known_path_returns_allowed_values(self, rest_client): + result = rest_client.get_attrs( + "setup/models/viscous/model", ["allowed-values", "active?"] + ) + attrs = result["attrs"] + assert "allowed-values" in attrs + assert "k-epsilon" in attrs["allowed-values"] + + def test_unknown_path_returns_empty_attrs(self, rest_client): + result = rest_client.get_attrs( + "setup/models/viscous/non_existing", ["allowed-values"] + ) + assert result["attrs"] == {} + + def test_recursive_flag_returns_attrs_key(self, rest_client): + result = rest_client.get_attrs( + "setup/models/energy/enabled", ["active?"], recursive=True + ) + assert "attrs" in result + + +# --------------------------------------------------------------------------- +# get_object_names / create / delete / rename +# --------------------------------------------------------------------------- + + +class TestNamedObjects: + def 
test_get_existing_object_names(self, rest_client): + names = rest_client.get_object_names("setup/boundary_conditions/velocity_inlet") + assert "inlet" in names + + def test_get_names_for_unknown_path_returns_empty(self, rest_client): + names = rest_client.get_object_names("setup/boundary_conditions/wall") + assert names == [] + + def test_create_object(self, rest_client): + rest_client.create("setup/boundary_conditions/wall", "wall-1") + names = rest_client.get_object_names("setup/boundary_conditions/wall") + assert "wall-1" in names + + def test_create_duplicate_is_idempotent(self, rest_client): + rest_client.create("setup/boundary_conditions/wall", "wall-1") + rest_client.create("setup/boundary_conditions/wall", "wall-1") + names = rest_client.get_object_names("setup/boundary_conditions/wall") + assert names.count("wall-1") == 1 + + def test_delete_object(self, rest_client): + rest_client.create("setup/boundary_conditions/wall", "wall-to-delete") + rest_client.delete("setup/boundary_conditions/wall", "wall-to-delete") + names = rest_client.get_object_names("setup/boundary_conditions/wall") + assert "wall-to-delete" not in names + + def test_delete_nonexistent_raises(self, rest_client): + with pytest.raises(FluentRestError) as exc_info: + rest_client.delete("setup/boundary_conditions/wall", "ghost") + assert exc_info.value.status == 404 + + def test_rename_object(self, rest_client): + rest_client.create("setup/boundary_conditions/wall", "old-name") + rest_client.rename( + "setup/boundary_conditions/wall", new="new-name", old="old-name" + ) + names = rest_client.get_object_names("setup/boundary_conditions/wall") + assert "new-name" in names + assert "old-name" not in names + + def test_rename_nonexistent_raises(self, rest_client): + with pytest.raises(FluentRestError) as exc_info: + rest_client.rename( + "setup/boundary_conditions/wall", new="x", old="does-not-exist" + ) + assert exc_info.value.status == 404 + + +# 
--------------------------------------------------------------------------- +# get_list_size +# --------------------------------------------------------------------------- + + +class TestListSize: + def test_known_path(self, rest_client): + size = rest_client.get_list_size( + "solution/run_calculation/pseudo_time_settings" + "/timestepping_parameters/profile_update_interval" + ) + assert size == 1 + + def test_unknown_path_returns_zero(self, rest_client): + assert rest_client.get_list_size("solution/run_calculation/unknown_list") == 0 + + +# --------------------------------------------------------------------------- +# execute_cmd +# --------------------------------------------------------------------------- + + +class TestExecuteCmd: + def test_registered_command(self, rest_client): + reply = rest_client.execute_cmd("solution/initialization", "initialize") + assert reply == "Initialization complete" + + def test_unregistered_command_returns_generic_reply(self, rest_client): + reply = rest_client.execute_cmd("some/path", "do_something", x=1) + assert "do_something" in reply + assert "some/path" in reply + + +# --------------------------------------------------------------------------- +# execute_query +# --------------------------------------------------------------------------- + + +class TestExecuteQuery: + def test_registered_query(self, rest_client): + reply = rest_client.execute_query( + "setup/boundary_conditions/velocity_inlet", "get_zone_names" + ) + assert isinstance(reply, list) + assert "inlet" in reply + + def test_unregistered_query_returns_generic_reply(self, rest_client): + reply = rest_client.execute_query("some/path", "info_query") + assert "info_query" in reply + + +# --------------------------------------------------------------------------- +# Helper methods (no server round-trip) +# --------------------------------------------------------------------------- + + +class TestHelpers: + def test_is_interactive_mode_returns_false(self, rest_client): 
+        assert rest_client.is_interactive_mode() is False
+
+    @pytest.mark.parametrize(
+        "name, expected",
+        [
+            ("*", True),
+            ("inlet_*", True),
+            ("?nlet", True),
+            ("[abc]inlet", True),
+            ("plain-name", False),
+            ("inlet", False),
+        ],
+    )
+    def test_has_wildcard(self, rest_client, name, expected):
+        assert rest_client.has_wildcard(name) is expected
+
+
+# ---------------------------------------------------------------------------
+# FluentRestError
+# ---------------------------------------------------------------------------
+
+
+class TestFluentRestError:
+    def test_str_representation(self):
+        err = FluentRestError(404, "Not found")
+        assert "404" in str(err)
+        assert "Not found" in str(err)
+
+    def test_status_attribute(self):
+        err = FluentRestError(500, "Server error")
+        assert err.status == 500

From 2995fed426fb37da59faf383ac53963efb4a4013 Mon Sep 17 00:00:00 2001
From: pyansys-ci-bot <92810346+pyansys-ci-bot@users.noreply.github.com>
Date: Thu, 26 Mar 2026 09:27:30 +0000
Subject: [PATCH 3/3] chore: adding changelog file 5015.added.md
 [dependabot-skip]

---
 doc/changelog.d/5015.added.md | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 doc/changelog.d/5015.added.md

diff --git a/doc/changelog.d/5015.added.md b/doc/changelog.d/5015.added.md
new file mode 100644
index 000000000000..18c3f91acf9e
--- /dev/null
+++ b/doc/changelog.d/5015.added.md
@@ -0,0 +1 @@
+Added a provisional REST transport for Fluent settings: a REST client and an in-process mock server for testing without a running Fluent instance.