Skip to content

Update generated code for DPF 261_daily on master #2379

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: master
Choose a base branch
from

Conversation

pyansys-ci-bot
Copy link
Collaborator

An update of generated code has been triggered either manually or by an update in the dpf-standalone repository.

@pyansys-ci-bot pyansys-ci-bot requested a review from a team as a code owner June 18, 2025 13:45
@pyansys-ci-bot pyansys-ci-bot added the server-sync DO NOT USE, Related to automatic synchronization with the server label Jun 18, 2025
Copy link
Contributor

github-actions bot commented Jun 18, 2025

Some tests configured with 'continue-on-error: true' have failed:

Copy link

codecov bot commented Jun 18, 2025

❌ 48 Tests Failed:

Tests completed: 25650 | Failed: 48 | Passed: 25602 | Skipped: 3614
View the top 3 failed test(s) by shortest run time
tests/test_animator.py::test_animator_animate_fields_container_scale_factor_fc
Stack Traces | 0.003s run time
@pytest.fixture()
    def displacement_fields():
        model = dpf.Model(examples.find_msup_transient())
        mesh_scoping = dpf.mesh_scoping_factory.nodal_scoping(
>           model.metadata.meshed_region.nodes.scoping
        )

tests/test_animator.py:55: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
..../test-api/lib/python3.10.../dpf/core/model.py:130: in metadata
    self._metadata = Metadata(self._data_sources, self._server)
..../test-api/lib/python3.10.../dpf/core/model.py:292: in __init__
    self._set_data_sources(data_sources)
..../test-api/lib/python3.10.../dpf/core/model.py:440: in _set_data_sources
    self._cache_streams_provider()
..../test-api/lib/python3.10.../dpf/core/model.py:322: in _cache_streams_provider
    self._stream_provider = Operator("stream_provider", server=self._server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ansys.dpf.core.dpf_operator.Operator object at 0x7f72d81abfa0>
name = 'stream_provider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x7f72f369e020>
operator = None

    def __init__(self, name=None, config=None, server=None, operator=None):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
>           raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
E           KeyError: "The operator stream_provider doesn't exist in the registry. Check its spelling in the documentation or verify its availability in your loaded plugins. The current available operator names can be accessed using 'available_operator_names' method."

..../test-api/lib/python3.10.../dpf/core/dpf_operator.py:163: KeyError
tests/test_animator.py::test_animator_animate_fields_container_scale_factor_int
Stack Traces | 0.003s run time
@pytest.fixture()
    def displacement_fields():
        model = dpf.Model(examples.find_msup_transient())
        mesh_scoping = dpf.mesh_scoping_factory.nodal_scoping(
>           model.metadata.meshed_region.nodes.scoping
        )

tests/test_animator.py:55: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
..../test-api/lib/python3.10.../dpf/core/model.py:130: in metadata
    self._metadata = Metadata(self._data_sources, self._server)
..../test-api/lib/python3.10.../dpf/core/model.py:292: in __init__
    self._set_data_sources(data_sources)
..../test-api/lib/python3.10.../dpf/core/model.py:440: in _set_data_sources
    self._cache_streams_provider()
..../test-api/lib/python3.10.../dpf/core/model.py:322: in _cache_streams_provider
    self._stream_provider = Operator("stream_provider", server=self._server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ansys.dpf.core.dpf_operator.Operator object at 0x7ff282974640>
name = 'stream_provider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x7ff29e992020>
operator = None

    def __init__(self, name=None, config=None, server=None, operator=None):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
>           raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
E           KeyError: "The operator stream_provider doesn't exist in the registry. Check its spelling in the documentation or verify its availability in your loaded plugins. The current available operator names can be accessed using 'available_operator_names' method."

..../test-api/lib/python3.10.../dpf/core/dpf_operator.py:163: KeyError
tests/test_animator.py::test_animator_animate_fields_container_scale_factor_list
Stack Traces | 0.003s run time
@pytest.fixture()
    def displacement_fields():
        model = dpf.Model(examples.find_msup_transient())
        mesh_scoping = dpf.mesh_scoping_factory.nodal_scoping(
>           model.metadata.meshed_region.nodes.scoping
        )

tests/test_animator.py:55: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
..../test-api/lib/python3.10.../dpf/core/model.py:130: in metadata
    self._metadata = Metadata(self._data_sources, self._server)
..../test-api/lib/python3.10.../dpf/core/model.py:292: in __init__
    self._set_data_sources(data_sources)
..../test-api/lib/python3.10.../dpf/core/model.py:440: in _set_data_sources
    self._cache_streams_provider()
..../test-api/lib/python3.10.../dpf/core/model.py:322: in _cache_streams_provider
    self._stream_provider = Operator("stream_provider", server=self._server)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <ansys.dpf.core.dpf_operator.Operator object at 0x7f72d8149fc0>
name = 'stream_provider', config = None
server = <ansys.dpf.core.server_types.InProcessServer object at 0x7f72f369e020>
operator = None

    def __init__(self, name=None, config=None, server=None, operator=None):
        """Initialize the operator with its name by connecting to a stub."""
        self.name = name
        self._internal_obj = None
        self._description = None
        self._inputs = None
        self._id = None
    
        # step 1: get server
        self._server = server_module.get_or_create_server(
            config._server if isinstance(config, Config) else server
        )
    
        # step 2: get api
        self._api_instance = None  # see _api property
    
        # step 3: init environment
        self._api.init_operator_environment(self)  # creates stub when gRPC
    
        # step 4: if object exists, take the instance, else create it
        if operator is not None:
            if isinstance(operator, Operator):
                core_api = self._server.get_api_for_type(
                    capi=data_processing_capi.DataProcessingCAPI,
                    grpcapi=data_processing_grpcapi.DataProcessingGRPCAPI,
                )
                core_api.init_data_processing_environment(self)
                self._internal_obj = core_api.data_processing_duplicate_object_reference(operator)
                self.name = operator.name
            else:
                self._internal_obj = operator
                self.name = self._api.operator_name(self)
        else:
            if self._server.has_client():
                self._internal_obj = self._api.operator_new_on_client(
                    self.name, self._server.client
                )
            else:
                self._internal_obj = self._api.operator_new(self.name)
    
        if self._internal_obj is None:
>           raise KeyError(
                f"The operator {self.name} doesn't exist in the registry. "
                f"Check its spelling in the documentation or verify its availability "
                f"in your loaded plugins. The current available operator names can be "
                f"accessed using 'available_operator_names' method."
            )
E           KeyError: "The operator stream_provider doesn't exist in the registry. Check its spelling in the documentation or verify its availability in your loaded plugins. The current available operator names can be accessed using 'available_operator_names' method."

..../test-api/lib/python3.10.../dpf/core/dpf_operator.py:163: KeyError

To view more test analytics, go to the Test Analytics Dashboard
📋 Got 3 mins? Take this short survey to help us improve Test Analytics.

Copy link
Contributor

The documentation for this pull request will be available at https://dpf.docs.pyansys.com/pull/2379. Please allow some time for the documentation to be deployed.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
server-sync DO NOT USE, Related to automatic synchronization with the server
Projects
None yet
Development

Successfully merging this pull request may close these issues.

2 participants