diff --git a/doc/changelog.d/4973.fixed.md b/doc/changelog.d/4973.fixed.md new file mode 100644 index 000000000000..4d311bbb4e5b --- /dev/null +++ b/doc/changelog.d/4973.fixed.md @@ -0,0 +1 @@ +Make type annotations runtime-safe for beartype compatibility diff --git a/pyproject.toml b/pyproject.toml index 606972420141..e184b3fe951a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,6 +20,7 @@ classifiers = [ dependencies = [ "ansys-api-fluent>=0.3.37", "ansys-platform-instancemanagement~=1.1", + "beartype>=0.18.0", "ansys-tools-common>=0.4.0", "ansys-tools-filetransfer>=0.2,<1.0", "ansys-units~=0.10.0", diff --git a/src/ansys/fluent/core/__init__.py b/src/ansys/fluent/core/__init__.py index cc94783aa26e..b1d7346957c3 100644 --- a/src/ansys/fluent/core/__init__.py +++ b/src/ansys/fluent/core/__init__.py @@ -22,6 +22,13 @@ """A package providing Fluent's Solver and Meshing capabilities in Python.""" +# NOTE: beartype_this_package() is intentionally NOT used here. +# `ansys` and `ansys.fluent` are implicit PEP 420 namespace packages +# (no __init__.py at those levels). beartype's package-level hook +# requires a regular package with __init__.py at the namespace root. +# The @beartype decorator is applied manually to public API functions instead. 
+# Upstream tracking: https://github.com/beartype/beartype/issues/286 + import os import pydoc import warnings diff --git a/src/ansys/fluent/core/_types.py b/src/ansys/fluent/core/_types.py index bf074d4253e0..8c2332ae0673 100644 --- a/src/ansys/fluent/core/_types.py +++ b/src/ansys/fluent/core/_types.py @@ -26,10 +26,8 @@ This module centralizes reusable typing constructs """ -from __future__ import annotations - import os from typing import TypeAlias -PathType: TypeAlias = "os.PathLike[str] | os.PathLike[bytes] | str | bytes" +PathType: TypeAlias = os.PathLike[str] | os.PathLike[bytes] | str | bytes """Type alias for file system paths.""" diff --git a/src/ansys/fluent/core/codegen/builtin_settingsgen.py b/src/ansys/fluent/core/codegen/builtin_settingsgen.py index fe1039bb3c47..ff0fe7423105 100644 --- a/src/ansys/fluent/core/codegen/builtin_settingsgen.py +++ b/src/ansys/fluent/core/codegen/builtin_settingsgen.py @@ -116,9 +116,11 @@ def generate(version: str): f.write(f", {named_object}: str") f.write(", settings_source: SettingsBase | Solver | None = None") if kind == "NonCreatableNamedObject": - f.write(", name: str = None") + f.write(", name: str | None = None") elif kind == "CreatableNamedObject": - f.write(", name: str = None, new_instance_name: str = None") + f.write( + ", name: str | None = None, new_instance_name: str | None = None" + ) f.write("):\n") f.write(" super().__init__(settings_source=settings_source") if kind == "NonCreatableNamedObject": diff --git a/src/ansys/fluent/core/examples/downloads.py b/src/ansys/fluent/core/examples/downloads.py index 199881d1c2b0..d1d894b12edd 100644 --- a/src/ansys/fluent/core/examples/downloads.py +++ b/src/ansys/fluent/core/examples/downloads.py @@ -73,7 +73,7 @@ def _get_file_url(file_name: str, directory: str | None = None) -> str: def _retrieve_file( url: str, file_name: str, - save_path: "PathType | None" = None, + save_path: PathType | None = None, return_without_path: bool | None = False, ) -> str: 
"""Download specified file from specified URL.""" @@ -120,7 +120,7 @@ def _retrieve_file( def download_file( file_name: str, directory: str | None = None, - save_path: "PathType | None" = None, + save_path: PathType | None = None, return_without_path: bool | None = None, ) -> str: """Download specified example file from the Ansys example data repository. diff --git a/src/ansys/fluent/core/field_data_interfaces.py b/src/ansys/fluent/core/field_data_interfaces.py index 99da58d0164a..7f6ae8f74732 100644 --- a/src/ansys/fluent/core/field_data_interfaces.py +++ b/src/ansys/fluent/core/field_data_interfaces.py @@ -113,7 +113,7 @@ class BaseFieldInfo(ABC): @abstractmethod def get_scalar_field_range( - self, field: str, node_value: bool = False, surface_ids: List[int] = None + self, field: str, node_value: bool = False, surface_ids: List[int] | None = None ) -> List[float]: """ Retrieve the range (minimum and maximum values) of a scalar field. @@ -412,14 +412,14 @@ def __init__(self, available_field_names, field_info): self._field_info = field_info def range( - self, field: str, node_value: bool = False, surface_ids: list[int] = None + self, field: str, node_value: bool = False, surface_ids: list[int] | None = None ) -> list[float]: """Get the range (minimum and maximum values) of the field. Parameters ---------- field: str - Field name + Field namex node_value: bool surface_ids : List[int], optional List of surface IDS for the surface data. 
diff --git a/src/ansys/fluent/core/file_session.py b/src/ansys/fluent/core/file_session.py index 8c7690a699d4..e06fecb212c0 100644 --- a/src/ansys/fluent/core/file_session.py +++ b/src/ansys/fluent/core/file_session.py @@ -966,7 +966,7 @@ def __init__(self, file_session): self._file_session = file_session def get_scalar_field_range( - self, field: str, node_value: bool = False, surface_ids: List[int] = None + self, field: str, node_value: bool = False, surface_ids: List[int] | None = None ) -> List[float]: """Get the range (minimum and maximum values) of the field. @@ -990,7 +990,7 @@ def get_scalar_field_range( return self._get_scalar_field_range(field, node_value, surface_ids) def _get_scalar_field_range( - self, field: str, node_value: bool = False, surface_ids: List[int] = None + self, field: str, node_value: bool = False, surface_ids: List[int] | None = None ) -> List[float]: minimum = None maximum = None diff --git a/src/ansys/fluent/core/filereader/case_file.py b/src/ansys/fluent/core/filereader/case_file.py index cd0ad4a4e5d9..3518a4a5a92f 100644 --- a/src/ansys/fluent/core/filereader/case_file.py +++ b/src/ansys/fluent/core/filereader/case_file.py @@ -617,8 +617,8 @@ class CaseFile(RPVarProcessor): def __init__( self, - case_file_name: "PathType | None" = None, - project_file_name: "PathType | None" = None, + case_file_name: PathType | None = None, + project_file_name: PathType | None = None, ) -> None: """Initialize a CaseFile object. Exactly one file path argument must be specified. 
diff --git a/src/ansys/fluent/core/fluent_connection.py b/src/ansys/fluent/core/fluent_connection.py index 926f86d80222..f11c077e0bb4 100644 --- a/src/ansys/fluent/core/fluent_connection.py +++ b/src/ansys/fluent/core/fluent_connection.py @@ -22,8 +22,6 @@ """Provides a module for Fluent connection functionality.""" -from __future__ import annotations - from contextlib import suppress import ctypes from ctypes import c_int, sizeof diff --git a/src/ansys/fluent/core/launcher/launcher.py b/src/ansys/fluent/core/launcher/launcher.py index 0f8c149a404c..3e1a31106205 100644 --- a/src/ansys/fluent/core/launcher/launcher.py +++ b/src/ansys/fluent/core/launcher/launcher.py @@ -29,9 +29,11 @@ import inspect import logging import os -from typing import Any, Dict +from typing import Any from warnings import warn +from beartype import beartype + from ansys.fluent.core._types import PathType from ansys.fluent.core.exceptions import DisallowedValuesError from ansys.fluent.core.fluent_connection import FluentConnection @@ -77,8 +79,9 @@ logger = logging.getLogger("pyfluent.launcher") +@beartype def create_launcher( - fluent_launch_mode: LaunchMode = LaunchMode.STANDALONE, **kwargs + fluent_launch_mode: LaunchMode = LaunchMode.STANDALONE, **kwargs: Any ) -> DockerLauncher | PIMLauncher | SlurmLauncher | StandaloneLauncher: """Use the factory function to create a launcher for supported launch modes. 
@@ -170,15 +173,16 @@ def _custom_converter_dimension(kwargs): version="v0.22.0", converter=_custom_converter_dimension, ) +@beartype def launch_fluent( product_version: FluentVersion | str | float | int | None = None, dimension: Dimension | int | None = None, precision: Precision | str | None = None, processor_count: int | None = None, journal_file_names: None | str | list[str] = None, - start_timeout: int = None, + start_timeout: int | None = None, additional_arguments: str = "", - env: Dict[str, Any] | None = None, + env: dict[str, Any] | None = None, start_container: bool | None = None, container_dict: dict | None = None, dry_run: bool = False, @@ -188,14 +192,14 @@ def launch_fluent( graphics_driver: ( FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None ) = None, - case_file_name: "PathType | None" = None, - case_data_file_name: "PathType | None" = None, + case_file_name: PathType | None = None, + case_data_file_name: PathType | None = None, lightweight_mode: bool | None = None, mode: FluentMode | str | None = None, py: bool | None = None, gpu: bool | list[int] | None = None, - cwd: "PathType | None" = None, - fluent_path: "PathType | None" = None, + cwd: PathType | None = None, + fluent_path: PathType | None = None, topy: str | list | None = None, start_watchdog: bool | None = None, scheduler_options: dict | None = None, @@ -410,6 +414,7 @@ def _mode_to_launcher_type(fluent_launch_mode: LaunchMode): return launcher() +@beartype def connect_to_fluent( ip: str | None = None, port: int | None = None, diff --git a/src/ansys/fluent/core/launcher/slurm_launcher.py b/src/ansys/fluent/core/launcher/slurm_launcher.py index 85ed7a636d6b..bedff9918650 100644 --- a/src/ansys/fluent/core/launcher/slurm_launcher.py +++ b/src/ansys/fluent/core/launcher/slurm_launcher.py @@ -321,7 +321,7 @@ def done(self) -> bool: return self._get_state() in ["", "CANCELLED", "COMPLETED"] def result( - self, timeout: int = None + self, timeout: int | None = None ) -> Meshing 
| PureMeshing | Solver | SolverIcing: """Return the session instance corresponding to the Fluent launch. If Fluent hasn't yet launched, then this method will wait up to timeout seconds. If Fluent @@ -345,7 +345,7 @@ def result( """ return self._future.result(timeout) - def exception(self, timeout: int = None) -> Exception: + def exception(self, timeout: int | None = None) -> Exception: """Return the exception raised by the Fluent launch. If Fluent hasn't yet launched, then this method will wait up to timeout seconds. If Fluent hasn't launched in timeout seconds, then a TimeoutError will be raised. If timeout is @@ -397,13 +397,13 @@ def __init__( env: Dict[str, Any] | None = None, cleanup_on_exit: bool = True, start_transcript: bool = True, - case_file_name: "PathType | None" = None, - case_data_file_name: "PathType | None" = None, + case_file_name: PathType | None = None, + case_data_file_name: PathType | None = None, lightweight_mode: bool | None = None, py: bool | None = None, gpu: bool | None = None, - cwd: "PathType | None" = None, - fluent_path: "PathType | None" = None, + cwd: PathType | None = None, + fluent_path: PathType | None = None, topy: str | list | None = None, start_watchdog: bool | None = None, scheduler_options: dict | None = None, diff --git a/src/ansys/fluent/core/launcher/standalone_launcher.py b/src/ansys/fluent/core/launcher/standalone_launcher.py index 7812a2534586..3aac2fb1f161 100644 --- a/src/ansys/fluent/core/launcher/standalone_launcher.py +++ b/src/ansys/fluent/core/launcher/standalone_launcher.py @@ -40,7 +40,9 @@ import os from pathlib import Path import subprocess -from typing import Any, Dict +from typing import Any + +from beartype import beartype from ansys.fluent.core._types import PathType from ansys.fluent.core.launcher.error_handler import ( @@ -77,12 +79,13 @@ class StandaloneLauncher: """Instantiates Fluent session in standalone mode.""" + @beartype def __init__( self, mode: FluentMode | str | None = None, ui_mode: UIMode 
| str | None = None, graphics_driver: ( - FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str + FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None ) = None, product_version: FluentVersion | str | float | int | None = None, dimension: Dimension | int | None = None, @@ -91,17 +94,17 @@ def __init__( journal_file_names: None | str | list[str] = None, start_timeout: int = 60, additional_arguments: str = "", - env: Dict[str, Any] | None = None, + env: dict[str, Any] | None = None, cleanup_on_exit: bool = True, dry_run: bool = False, start_transcript: bool = True, - case_file_name: "PathType | None" = None, - case_data_file_name: "PathType | None" = None, + case_file_name: PathType | None = None, + case_data_file_name: PathType | None = None, lightweight_mode: bool | None = None, py: bool | None = None, gpu: bool | None = None, - cwd: "PathType | None" = None, - fluent_path: "PathType | None" = None, + cwd: PathType | None = None, + fluent_path: PathType | None = None, topy: str | list | None = None, start_watchdog: bool | None = None, file_transfer_service: Any | None = None, diff --git a/src/ansys/fluent/core/meshing/meshing_workflow.py b/src/ansys/fluent/core/meshing/meshing_workflow.py index 1ac53d825343..f57e0eaf87ad 100644 --- a/src/ansys/fluent/core/meshing/meshing_workflow.py +++ b/src/ansys/fluent/core/meshing/meshing_workflow.py @@ -23,8 +23,6 @@ """Meshing workflow specialization of the Workflow module that wraps and extends the core functionality.""" -from __future__ import annotations - from enum import Enum import os diff --git a/src/ansys/fluent/core/meshing/meshing_workflow_new.py b/src/ansys/fluent/core/meshing/meshing_workflow_new.py index db2ff719fb8d..82897faac835 100644 --- a/src/ansys/fluent/core/meshing/meshing_workflow_new.py +++ b/src/ansys/fluent/core/meshing/meshing_workflow_new.py @@ -23,8 +23,6 @@ """Meshing workflow specialization of the Workflow module that wraps and extends the core functionality.""" -from 
__future__ import annotations - from enum import Enum import os @@ -271,7 +269,7 @@ def __init__( workflow: PyMenuGeneric, meshing: PyMenuGeneric, fluent_version: FluentVersion, - file_path: PathType = None, + file_path: PathType | None = None, initialize: bool = True, ) -> None: """Initialize a ``LoadWorkflow`` instance. diff --git a/src/ansys/fluent/core/search.py b/src/ansys/fluent/core/search.py index a804dfda9c70..9e8b1786f8c5 100644 --- a/src/ansys/fluent/core/search.py +++ b/src/ansys/fluent/core/search.py @@ -420,7 +420,7 @@ def _search_whole_word( search_string: str, match_case: bool = False, match_whole_word: bool = True, - api_tree_data: dict = None, + api_tree_data: dict | None = None, api_path: str | None = None, ): """Perform exact search for a word through the Fluent's object hierarchy. diff --git a/src/ansys/fluent/core/services/field_data.py b/src/ansys/fluent/core/services/field_data.py index a624864f89dc..85e4028719f5 100644 --- a/src/ansys/fluent/core/services/field_data.py +++ b/src/ansys/fluent/core/services/field_data.py @@ -176,7 +176,7 @@ def __init__( self._is_data_valid = is_data_valid def get_scalar_field_range( - self, field: str, node_value: bool = False, surface_ids: List[int] = None + self, field: str, node_value: bool = False, surface_ids: List[int] | None = None ) -> List[float]: """Get the range (minimum and maximum values) of the field. 
@@ -200,7 +200,7 @@ def get_scalar_field_range( return self._get_scalar_field_range(field, node_value, surface_ids) def _get_scalar_field_range( - self, field: str, node_value: bool = False, surface_ids: List[int] = None + self, field: str, node_value: bool = False, surface_ids: List[int] | None = None ) -> List[float]: if not surface_ids: surface_ids = [] @@ -1105,7 +1105,7 @@ class ChunkParser: field : numpy array """ - def __init__(self, callbacks_provider: object = None): + def __init__(self, callbacks_provider: object | None = None): """__init__ method of ChunkParser class.""" self._callbacks_provider = callbacks_provider diff --git a/src/ansys/fluent/core/services/scheme_eval.py b/src/ansys/fluent/core/services/scheme_eval.py index 4428ae66e291..f6c3355828d4 100644 --- a/src/ansys/fluent/core/services/scheme_eval.py +++ b/src/ansys/fluent/core/services/scheme_eval.py @@ -88,7 +88,7 @@ def string_eval( def scheme_eval( self, request: SchemeEvalProtoModule.SchemeEvalRequest, - metadata: list[tuple[str, str]] = None, + metadata: list[tuple[str, str]] | None = None, ) -> SchemeEvalProtoModule.SchemeEvalResponse: """SchemeEval RPC of SchemeEval service.""" new_metadata = self.__metadata diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py index 423fa213b0c3..6d80d4a7a157 100644 --- a/src/ansys/fluent/core/session.py +++ b/src/ansys/fluent/core/session.py @@ -30,6 +30,7 @@ import warnings import weakref +from beartype import beartype from deprecated.sphinx import deprecated from ansys.fluent.core._types import PathType @@ -263,6 +264,7 @@ def _build_from_fluent_connection( self._fluent_connection.register_finalizer_cb(obj.stop) @deprecate_function(version="v0.38.0", new_func="is_active") + @beartype def is_server_healthy(self) -> bool: """Whether the current session is healthy (i.e. 
the server is 'SERVING').""" return self._is_server_healthy() @@ -271,6 +273,7 @@ def _is_server_healthy(self) -> bool: """Whether the current session is healthy (i.e. the server is 'SERVING').""" return self._health_check.is_serving + @beartype def is_active(self) -> bool: """Whether the current session is active.""" return self._fluent_connection is not None and self._is_server_healthy() @@ -308,6 +311,7 @@ def field_data_streaming(self): return self.fields.field_data_streaming @property + @beartype def id(self) -> str: """Return the session ID.""" return self._fluent_connection._id @@ -379,10 +383,12 @@ def _create_from_server_info_file( ) return session + @beartype def execute_tui(self, command: str) -> None: """Executes a tui command.""" self.scheme.eval(f"(ti-menu-load-string {json.dumps(command)})") + @beartype def get_fluent_version(self) -> FluentVersion: """Gets and returns the fluent version.""" return FluentVersion(self.scheme.version) @@ -415,6 +421,7 @@ def wait_process_finished(self, wait: float | int | bool = 60): """ return self._fluent_connection_backup.wait_process_finished() + @beartype def exit(self, **kwargs) -> None: """Exit session. @@ -431,11 +438,13 @@ def _exit(self, **kwargs) -> None: self._fluent_connection.exit(**kwargs) self._fluent_connection = None + @beartype def force_exit(self) -> None: """Forces the Fluent session to exit, losing unsaved progress and data.""" self._exit_compose_service() self._fluent_connection.force_exit() + @beartype def file_exists_on_remote(self, file_name: str) -> bool: """Check if remote file exists. @@ -493,6 +502,7 @@ def download(self, file_name: str, local_directory: str | None = None): ) return self._file_transfer_service.download(file_name, local_directory) + @beartype def chdir(self, path: PathType) -> None: """Change Fluent working directory. 
diff --git a/src/ansys/fluent/core/session_base_meshing.py b/src/ansys/fluent/core/session_base_meshing.py index 7a93cd3b5f4f..1b0799eb1e76 100644 --- a/src/ansys/fluent/core/session_base_meshing.py +++ b/src/ansys/fluent/core/session_base_meshing.py @@ -333,7 +333,7 @@ def topology_based_meshing_workflow( def load_workflow( self, - file_path: PathType = None, + file_path: PathType | None = None, initialize: bool = True, legacy: bool | None = None, ): diff --git a/src/ansys/fluent/core/session_utilities.py b/src/ansys/fluent/core/session_utilities.py index fff2926411f3..57fea5f56151 100644 --- a/src/ansys/fluent/core/session_utilities.py +++ b/src/ansys/fluent/core/session_utilities.py @@ -62,7 +62,7 @@ def from_install( cls, ui_mode: UIMode | str | None = None, graphics_driver: ( - FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str + FluentWindowsGraphicsDriver | FluentLinuxGraphicsDriver | str | None ) = None, product_version: FluentVersion | str | float | int | None = None, dimension: Dimension | int | None = None, @@ -75,13 +75,13 @@ def from_install( cleanup_on_exit: bool = True, dry_run: bool = False, start_transcript: bool = True, - case_file_name: "PathType | None" = None, - case_data_file_name: "PathType | None" = None, + case_file_name: PathType | None = None, + case_data_file_name: PathType | None = None, lightweight_mode: bool | None = None, py: bool | None = None, gpu: bool | None = None, - cwd: "PathType | None" = None, - fluent_path: "PathType | None" = None, + cwd: PathType | None = None, + fluent_path: PathType | None = None, topy: str | list | None = None, start_watchdog: bool | None = None, file_transfer_service: Any | None = None, diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py index 5fb82cf9c500..4c79cc540919 100644 --- a/src/ansys/fluent/core/solver/flobject.py +++ b/src/ansys/fluent/core/solver/flobject.py @@ -38,8 +38,6 @@ >>> 
r.boundary_conditions.velocity_inlet['inlet'].vmag.constant = 20 """ -from __future__ import annotations - import collections from contextlib import contextmanager, nullcontext, suppress import fnmatch @@ -67,6 +65,7 @@ _eval_type, get_args, get_origin, + no_type_check, ) import warnings import weakref @@ -276,6 +275,9 @@ def _is_deprecated(obj) -> bool | None: ) +# no_type_check: __setattr__ unconditionally raises AttributeError, preventing +# beartype from writing wrapper attributes on instances or subclasses. +@no_type_check class Base: """Provides the base class for settings and command objects. @@ -797,6 +799,9 @@ def _create_child(cls, name, parent: weakref.CallableProxyType, alias_path=None) return cls(name, parent) +# no_type_check: generic proxy base; instances communicate with a live gRPC +# service; attribute writes intercepted by Base.__setattr__. +@no_type_check class SettingsBase(Base, Generic[StateT]): """Base class for settings objects. @@ -1055,6 +1060,9 @@ def _get_type_for_completer_info(cls) -> str: return cls.__bases__[0].__name__ +# no_type_check: defines __setattr__ and __getattribute__ proxying to gRPC; +# beartype inspection would trigger unintended network calls. +@no_type_check class Group(SettingsBase[DictStateType]): """A ``Group`` container object. @@ -1236,6 +1244,8 @@ def __setattr__(self, name: str, value): raise +# no_type_check: defines __getattr__ proxying wildcard paths to gRPC. +@no_type_check class WildcardPath(Group): """Class wrapping a wildcard path to perform get_var and set_var on flproxy.""" @@ -1332,6 +1342,9 @@ def __setitem__(self, name, value): ChildTypeT = TypeVar("ChildTypeT") +# no_type_check: defines __getattr__ proxying to gRPC; subclasses generated +# dynamically by get_cls() factory and are not statically analysable. +@no_type_check class NamedObject(SettingsBase[DictStateType], Generic[ChildTypeT]): """A ``NamedObject`` container is a container object similar to a Python dictionary object. 
Generally, many such objects can be created with different names. @@ -1595,6 +1608,9 @@ def _rename(obj: NamedObject | _Alias, new: str, old: str): obj._create_child_object(new) +# no_type_check: defines __getattr__ proxying to gRPC; subclasses generated +# dynamically by get_cls() factory. +@no_type_check class ListObject(SettingsBase[ListStateType], Generic[ChildTypeT]): """A ``ListObject`` container is a container object, similar to a Python list object. Generally, many such objects can be created. @@ -1739,6 +1755,9 @@ def _get_new_keywords(obj, *args, **kwds): return newkwds +# no_type_check: defines __getattr__; Command/Query subclasses are +# dynamically generated by get_cls() factory. +@no_type_check class Action(Base): """Intermediate Base class for Command and Query class.""" @@ -2150,6 +2169,9 @@ def __set__(self, instance, value): # pylint: disable=missing-raises-doc +# no_type_check: runtime class factory using type(); produces classes +# dynamically from Fluent metadata and is not statically analysable. 
+@no_type_check def get_cls(name, info, parent=None, version=None, parent_taboo=None): """Create a class for the object identified by "path".""" try: diff --git a/src/ansys/fluent/core/solver/flunits.py b/src/ansys/fluent/core/solver/flunits.py index 2be657a15507..4a76fdfbb023 100644 --- a/src/ansys/fluent/core/solver/flunits.py +++ b/src/ansys/fluent/core/solver/flunits.py @@ -124,8 +124,6 @@ 'wave-length': 'Angstrom'} """ -from __future__ import annotations - from typing import TypeVar _fl_unit_table = { diff --git a/src/ansys/fluent/core/ui/standalone_web_ui.py b/src/ansys/fluent/core/ui/standalone_web_ui.py index c3f6385b96ca..1f938081aea6 100644 --- a/src/ansys/fluent/core/ui/standalone_web_ui.py +++ b/src/ansys/fluent/core/ui/standalone_web_ui.py @@ -22,8 +22,6 @@ """Web UI for Fluent settings using Panel with lazy loading and batched property access.""" -from __future__ import annotations - from typing import Any, Callable, Dict, List try: diff --git a/src/ansys/fluent/core/utils/deprecate.py b/src/ansys/fluent/core/utils/deprecate.py index 60cde218b29c..83088f3ac5f0 100644 --- a/src/ansys/fluent/core/utils/deprecate.py +++ b/src/ansys/fluent/core/utils/deprecate.py @@ -135,6 +135,7 @@ def wrapper(*args, **kwargs): ) return deprecated_func(*args, **kwargs) + wrapper.__signature__ = inspect.signature(func) return wrapper return decorator @@ -178,6 +179,7 @@ def wrapper(*args, **kwargs): warnings.warn(reason, warning_cls, stacklevel=2) return decorated(*args, **kwargs) + wrapper.__signature__ = inspect.signature(func) return wrapper return decorator diff --git a/src/ansys/fluent/core/workflow.py b/src/ansys/fluent/core/workflow.py index 98a5d09050f6..6070763004f9 100644 --- a/src/ansys/fluent/core/workflow.py +++ b/src/ansys/fluent/core/workflow.py @@ -22,8 +22,6 @@ """Workflow module that wraps and extends the core functionality.""" -from __future__ import annotations - from contextlib import suppress import logging import re @@ -203,7 +201,7 @@ class 
BaseTask: def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize BaseTask. @@ -621,7 +619,7 @@ class TaskContainer(PyCallableStateObject): __dir__() """ - def __init__(self, command_source: ClassicWorkflow) -> None: + def __init__(self, command_source: "ClassicWorkflow") -> None: """Initialize TaskContainer. Parameters @@ -969,7 +967,7 @@ class CommandTask(BaseTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize CommandTask. @@ -984,7 +982,7 @@ def __init__( super().__init__(command_source, task) @property - def command_arguments(self) -> ReadOnlyObject: + def command_arguments(self) -> "ReadOnlyObject": """Get the task's arguments in read-only form (deprecated). Returns @@ -996,7 +994,7 @@ def command_arguments(self) -> ReadOnlyObject: return self._refreshed_command() @property - def _command_arguments(self) -> ReadOnlyObject: + def _command_arguments(self) -> "ReadOnlyObject": return self._refreshed_command() @property @@ -1010,7 +1008,7 @@ def arguments(self) -> ArgumentsWrapper: """ return ArgumentsWrapper(self) - def _refreshed_command(self) -> ReadOnlyObject: + def _refreshed_command(self) -> "ReadOnlyObject": task_arg_state = self._task.Arguments.get_state() cmd = self._command() if task_arg_state: @@ -1038,7 +1036,7 @@ class SimpleTask(CommandTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize SimpleTask. @@ -1066,7 +1064,7 @@ class CompoundChild(SimpleTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize CompoundChild. @@ -1111,7 +1109,7 @@ class CompositeTask(BaseTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize CompositeTask. 
@@ -1126,7 +1124,7 @@ def __init__( super().__init__(command_source, task) @property - def command_arguments(self) -> ReadOnlyObject: + def command_arguments(self) -> "ReadOnlyObject": """Get the task's arguments in read-only form (deprecated). Returns @@ -1138,7 +1136,7 @@ def command_arguments(self) -> ReadOnlyObject: return {} @property - def _command_arguments(self) -> ReadOnlyObject: + def _command_arguments(self) -> "ReadOnlyObject": return {} @property @@ -1163,7 +1161,7 @@ class ConditionalTask(CommandTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize ConditionalTask. @@ -1198,7 +1196,7 @@ class CompoundTask(CommandTask): def __init__( self, - command_source: Workflow, + command_source: "Workflow", task: str, ) -> None: """Initialize CompoundTask. diff --git a/src/ansys/fluent/core/workflow_new.py b/src/ansys/fluent/core/workflow_new.py index cb7edce33a03..ff8556ad33cf 100644 --- a/src/ansys/fluent/core/workflow_new.py +++ b/src/ansys/fluent/core/workflow_new.py @@ -43,8 +43,6 @@ simulation workflows, with automatic dependency management and validation. """ -from __future__ import annotations - from collections import OrderedDict from functools import wraps import inspect @@ -360,7 +358,7 @@ def task_names(self): """ return [name.split(":")[0] for name in self._workflow.task_object()] - def children(self) -> list[TaskObject]: + def children(self) -> "list[TaskObject]": """Get the top-level tasks in the workflow in display order. Returns an ordered list of the workflow's main tasks (those directly under @@ -396,7 +394,7 @@ def children(self) -> list[TaskObject]: return wrapped_tasks - def first_child(self) -> TaskObject | None: + def first_child(self) -> "TaskObject | None": """Get the first top-level task in the workflow. 
Returns @@ -437,7 +435,7 @@ def first_child(self) -> TaskObject | None: self._command_source, ) - def last_child(self) -> TaskObject | None: + def last_child(self) -> "TaskObject | None": """Get the last top-level task in the workflow. Returns @@ -506,7 +504,7 @@ def _ordered_tasks(self): return sorted_dict - def delete_tasks(self, list_of_tasks: list[TaskObject]): + def delete_tasks(self, list_of_tasks: "list[TaskObject]"): """Delete multiple tasks from the workflow. Removes the specified tasks from the workflow. Tasks are identified by TaskObject instances. @@ -537,7 +535,7 @@ def delete_tasks(self, list_of_tasks: list[TaskObject]): self._workflow.general.delete_tasks(list_of_tasks=items_to_be_deleted) @property - def insertable_tasks(self) -> FirstTask: + def insertable_tasks(self) -> "FirstTask": """Tasks that can be inserted into an empty workflow. Returns a helper that exposes the set of valid starting tasks for a blank @@ -688,7 +686,7 @@ def __init__( task_object: PyMenu, base_name: str, workflow: PyMenu, - parent: Workflow | TaskObject, + parent: "Workflow | TaskObject", meshing_root: PyMenu, ): """Initialize a TaskObject wrapper. @@ -812,7 +810,7 @@ def __init__(self, base_task): setattr(self, item, insertable_task) self._insertable_tasks.append(insertable_task) - def __call__(self) -> list[_Insert]: + def __call__(self) -> "list[_Insert]": """Get list of all insertable task objects. Returns diff --git a/tests/test_deprecate.py b/tests/test_deprecate.py index 6c5c2b7ba783..39aee2396c6d 100644 --- a/tests/test_deprecate.py +++ b/tests/test_deprecate.py @@ -20,6 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+import inspect import warnings import pytest @@ -155,3 +156,33 @@ def new_add(a, b): assert "3.0.0" in str(warning.message) assert result == 3 + + +def test_deprecate_arguments_preserves_signature(): + """Test that @deprecate_arguments preserves the original function signature.""" + + @deprecate_arguments(old_args="old_x", new_args="x", version="3.0.0") + def my_func(x: int, y: str = "hello") -> bool: + return True + + sig = inspect.signature(my_func) + param_names = list(sig.parameters.keys()) + assert param_names == ["x", "y"] + assert sig.parameters["x"].annotation is int + assert sig.parameters["y"].default == "hello" + assert sig.return_annotation is bool + + +def test_deprecate_function_preserves_signature(): + """Test that @deprecate_function preserves the original function signature.""" + + @deprecate_function(version="3.0.0", new_func="new_fn") + def old_fn(a: int, b: str = "world") -> float: + return 1.0 + + sig = inspect.signature(old_fn) + param_names = list(sig.parameters.keys()) + assert param_names == ["a", "b"] + assert sig.parameters["a"].annotation is int + assert sig.parameters["b"].default == "world" + assert sig.return_annotation is float