From bfe3b14ae7aa2ba6b66a8a6af8135129f33f96c7 Mon Sep 17 00:00:00 2001
From: Anthonios Partheniou
Date: Mon, 1 Dec 2025 20:44:59 +0000
Subject: [PATCH] chore: remove code formatting for google dir

---
 .../%name_%version/%sub/test_%service.py.j2 | 2 +-
 gapic/schema/metadata.py | 4 +-
 gapic/schema/wrappers.py | 2 +-
 .../templates/%namespace/%name/__init__.py.j2 | 12 +-
 .../%name_%version/%sub/__init__.py.j2 | 119 +-
 .../%sub/services/%service/__init__.py.j2 | 4 +-
 .../services/%service/_async_mixins.py.j2 | 112 +-
 .../%sub/services/%service/_client_macros.j2 | 46 +-
 .../%sub/services/%service/_mixins.py.j2 | 133 +-
 .../%sub/services/%service/_shared_macros.j2 | 142 +-
 .../%sub/services/%service/async_client.py.j2 | 136 +-
 .../%sub/services/%service/client.py.j2 | 218 +-
 .../%sub/services/%service/pagers.py.j2 | 44 +-
 .../%service/transports/__init__.py.j2 | 22 +-
 .../%service/transports/_mixins.py.j2 | 25 +-
 .../transports/_rest_mixins_base.py.j2 | 12 +-
 .../services/%service/transports/base.py.j2 | 146 +-
 .../services/%service/transports/grpc.py.j2 | 102 +-
 .../%service/transports/grpc_asyncio.py.j2 | 97 +-
 .../services/%service/transports/rest.py.j2 | 125 +-
 .../%service/transports/rest_asyncio.py.j2 | 113 +-
 .../%service/transports/rest_base.py.j2 | 54 +-
 .../%name_%version/%sub/types/%proto.py.j2 | 11 +-
 .../%name_%version/%sub/types/__init__.py.j2 | 4 +-
 .../%name_%version/%sub/types/_message.py.j2 | 11 +-
 gapic/templates/examples/feature_fragments.j2 | 2 +-
 gapic/templates/noxfile.py.j2 | 4 +-
 .../gapic/%name_%version/%sub/test_macros.j2 | 4 +-
 .../asset/google/cloud/asset/__init__.py | 159 +-
 .../asset/google/cloud/asset_v1/__init__.py | 236 +-
 .../services/asset_service/__init__.py | 4 +-
 .../services/asset_service/async_client.py | 557 +++--
 .../asset_v1/services/asset_service/client.py | 787 +++---
 .../asset_v1/services/asset_service/pagers.py | 302 ++-
 .../asset_service/transports/__init__.py | 16 +-
 .../services/asset_service/transports/base.py | 457 ++--
 .../services/asset_service/transports/grpc.py | 408 ++--
 .../asset_service/transports/grpc_asyncio.py | 408 ++--
 .../services/asset_service/transports/rest.py | 2043 ++++++++++------
 .../asset_service/transports/rest_base.py | 643 +++--
 .../google/cloud/asset_v1/types/__init__.py | 154 +-
 .../cloud/asset_v1/types/asset_service.py | 403 +--
 .../google/cloud/asset_v1/types/assets.py | 140 +-
 tests/integration/goldens/asset/noxfile.py | 2 +-
 .../unit/gapic/asset_v1/test_asset_service.py | 1272 +++++-----
 .../google/iam/credentials/__init__.py | 21 +-
 .../google/iam/credentials_v1/__init__.py | 98 +-
 .../services/iam_credentials/__init__.py | 4 +-
 .../services/iam_credentials/async_client.py | 157 +-
 .../services/iam_credentials/client.py | 311 ++-
 .../iam_credentials/transports/__init__.py | 16 +-
 .../iam_credentials/transports/base.py | 127 +-
 .../iam_credentials/transports/grpc.py | 133 +-
 .../transports/grpc_asyncio.py | 133 +-
 .../iam_credentials/transports/rest.py | 388 +--
 .../iam_credentials/transports/rest_base.py | 146 +-
 .../iam/credentials_v1/types/__init__.py | 16 +-
 .../google/iam/credentials_v1/types/common.py | 20 +-
 .../credentials_v1/types/iamcredentials.py | 4 +-
 .../goldens/credentials/noxfile.py | 2 +-
 .../credentials_v1/test_iam_credentials.py | 314 +--
 .../google/cloud/eventarc_v1/__init__.py | 156 +-
 .../eventarc_v1/services/eventarc/__init__.py | 4 +-
 .../services/eventarc/async_client.py | 572 +++--
 .../eventarc_v1/services/eventarc/client.py | 853 ++++---
 .../eventarc_v1/services/eventarc/pagers.py | 173 +-
 .../services/eventarc/transports/__init__.py | 16 +-
 .../services/eventarc/transports/base.py | 410 ++--
 .../services/eventarc/transports/grpc.py | 359 ++-
 .../eventarc/transports/grpc_asyncio.py | 359 ++-
 .../services/eventarc/transports/rest.py | 2172 ++++++++++-------
 .../services/eventarc/transports/rest_base.py | 713 +++---
 .../cloud/eventarc_v1/types/__init__.py | 74 +-
 .../google/cloud/eventarc_v1/types/channel.py | 9 +-
 .../eventarc_v1/types/channel_connection.py | 6 +-
 .../cloud/eventarc_v1/types/discovery.py | 18 +-
 .../cloud/eventarc_v1/types/eventarc.py | 50 +-
 .../types/google_channel_config.py | 6 +-
 .../google/cloud/eventarc_v1/types/trigger.py | 58 +-
 tests/integration/goldens/eventarc/noxfile.py | 2 +-
 .../unit/gapic/eventarc_v1/test_eventarc.py | 1084 ++++----
 .../logging/google/cloud/logging/__init__.py | 165 +-
 .../google/cloud/logging_v2/__init__.py | 242 +-
 .../services/config_service_v2/__init__.py | 4 +-
 .../config_service_v2/async_client.py | 735 +++---
 .../services/config_service_v2/client.py | 979 +++++---
 .../services/config_service_v2/pagers.py | 216 +-
 .../config_service_v2/transports/__init__.py | 10 +-
 .../config_service_v2/transports/base.py | 621 +++--
 .../config_service_v2/transports/grpc.py | 540 ++--
 .../transports/grpc_asyncio.py | 540 ++--
 .../services/logging_service_v2/__init__.py | 4 +-
 .../logging_service_v2/async_client.py | 214 +-
 .../services/logging_service_v2/client.py | 367 +--
 .../services/logging_service_v2/pagers.py | 130 +-
 .../logging_service_v2/transports/__init__.py | 10 +-
 .../logging_service_v2/transports/base.py | 181 +-
 .../logging_service_v2/transports/grpc.py | 172 +-
 .../transports/grpc_asyncio.py | 172 +-
 .../services/metrics_service_v2/__init__.py | 4 +-
 .../metrics_service_v2/async_client.py | 199 +-
 .../services/metrics_service_v2/client.py | 353 +--
 .../services/metrics_service_v2/pagers.py | 44 +-
 .../metrics_service_v2/transports/__init__.py | 10 +-
 .../metrics_service_v2/transports/base.py | 164 +-
 .../metrics_service_v2/transports/grpc.py | 158 +-
 .../transports/grpc_asyncio.py | 158 +-
 .../google/cloud/logging_v2/types/__init__.py | 152 +-
 .../cloud/logging_v2/types/log_entry.py | 30 +-
 .../google/cloud/logging_v2/types/logging.py | 36 +-
 .../cloud/logging_v2/types/logging_config.py | 249 +-
 .../cloud/logging_v2/types/logging_metrics.py | 31 +-
 tests/integration/goldens/logging/noxfile.py | 2 +-
 ...gging_service_v2_tail_log_entries_async.py | 2 +-
 ...ogging_service_v2_tail_log_entries_sync.py | 2 +-
 .../logging_v2/test_config_service_v2.py | 1270 +++++-----
 .../logging_v2/test_logging_service_v2.py | 188 +-
 .../logging_v2/test_metrics_service_v2.py | 288 +--
 .../google/cloud/logging/__init__.py | 165 +-
 .../google/cloud/logging_v2/__init__.py | 242 +-
 .../services/config_service_v2/__init__.py | 4 +-
 .../config_service_v2/async_client.py | 735 +++---
 .../services/config_service_v2/client.py | 979 +++++---
 .../services/config_service_v2/pagers.py | 216 +-
 .../config_service_v2/transports/__init__.py | 10 +-
 .../config_service_v2/transports/base.py | 621 +++--
 .../config_service_v2/transports/grpc.py | 540 ++--
 .../transports/grpc_asyncio.py | 540 ++--
 .../services/logging_service_v2/__init__.py | 4 +-
 .../logging_service_v2/async_client.py | 214 +-
 .../services/logging_service_v2/client.py | 367 +--
 .../services/logging_service_v2/pagers.py | 130 +-
 .../logging_service_v2/transports/__init__.py | 10 +-
 .../logging_service_v2/transports/base.py | 181 +-
 .../logging_service_v2/transports/grpc.py | 172 +-
 .../transports/grpc_asyncio.py | 172 +-
 .../services/metrics_service_v2/__init__.py | 4 +-
 .../metrics_service_v2/async_client.py | 199 +-
 .../services/metrics_service_v2/client.py | 353 +--
 .../services/metrics_service_v2/pagers.py | 44 +-
 .../metrics_service_v2/transports/__init__.py | 10 +-
 .../metrics_service_v2/transports/base.py | 164 +-
 .../metrics_service_v2/transports/grpc.py | 158 +-
 .../transports/grpc_asyncio.py | 158 +-
 .../google/cloud/logging_v2/types/__init__.py | 152 +-
 .../cloud/logging_v2/types/log_entry.py | 30 +-
 .../google/cloud/logging_v2/types/logging.py | 36 +-
 .../cloud/logging_v2/types/logging_config.py | 249 +-
 .../cloud/logging_v2/types/logging_metrics.py | 31 +-
 .../goldens/logging_internal/noxfile.py | 2 +-
 ...gging_service_v2_tail_log_entries_async.py | 2 +-
 ...ogging_service_v2_tail_log_entries_sync.py | 2 +-
 .../logging_v2/test_config_service_v2.py | 1270 +++++-----
 .../logging_v2/test_logging_service_v2.py | 188 +-
 .../logging_v2/test_metrics_service_v2.py | 288 +--
 .../redis/google/cloud/redis/__init__.py | 59 +-
 .../redis/google/cloud/redis_v1/__init__.py | 136 +-
 .../redis_v1/services/cloud_redis/__init__.py | 4 +-
 .../services/cloud_redis/async_client.py | 392 +--
 .../redis_v1/services/cloud_redis/client.py | 548 +++--
 .../redis_v1/services/cloud_redis/pagers.py | 44 +-
 .../cloud_redis/transports/__init__.py | 22 +-
 .../services/cloud_redis/transports/base.py | 286 ++-
 .../services/cloud_redis/transports/grpc.py | 260 +-
 .../cloud_redis/transports/grpc_asyncio.py | 260 +-
 .../services/cloud_redis/transports/rest.py | 1460 ++++++-----
 .../cloud_redis/transports/rest_asyncio.py | 1499 +++++++-----
 .../cloud_redis/transports/rest_base.py | 446 ++--
 .../google/cloud/redis_v1/types/__init__.py | 54 +-
 .../cloud/redis_v1/types/cloud_redis.py | 123 +-
 tests/integration/goldens/redis/noxfile.py | 2 +-
 .../unit/gapic/redis_v1/test_cloud_redis.py | 758 +++---
 .../google/cloud/redis/__init__.py | 33 +-
 .../google/cloud/redis_v1/__init__.py | 110 +-
 .../redis_v1/services/cloud_redis/__init__.py | 4 +-
 .../services/cloud_redis/async_client.py | 248 +-
 .../redis_v1/services/cloud_redis/client.py | 404 +--
 .../redis_v1/services/cloud_redis/pagers.py | 44 +-
 .../cloud_redis/transports/__init__.py | 22 +-
 .../services/cloud_redis/transports/base.py | 184 +-
 .../services/cloud_redis/transports/grpc.py | 176 +-
 .../cloud_redis/transports/grpc_asyncio.py | 176 +-
 .../services/cloud_redis/transports/rest.py | 951 +++---
 .../cloud_redis/transports/rest_asyncio.py | 990 +++---
 .../cloud_redis/transports/rest_base.py | 273 ++-
 .../google/cloud/redis_v1/types/__init__.py | 28 +-
 .../cloud/redis_v1/types/cloud_redis.py | 70 +-
 .../goldens/redis_selective/noxfile.py | 2 +-
 .../unit/gapic/redis_v1/test_cloud_redis.py | 516 ++--
 ...ed_snippets_method_bidi_streaming_async.py | 2 +-
 ...ted_snippets_method_bidi_streaming_sync.py | 2 +-
 tests/unit/schema/test_metadata.py | 8 +-
 tests/unit/schema/wrappers/test_field.py | 6 +-
 tests/unit/schema/wrappers/test_method.py | 2 +-
 194 files changed, 26064 insertions(+), 20505 deletions(-)
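Note for reviewers: most hunks below do one of two things. They either normalize
the generated code to double-quoted strings, trailing commas, and one argument
per line, or they fence hand-shaped blocks with "# fmt: off" / "# fmt: on" so
the formatter leaves those lines alone. As a minimal, hypothetical sketch of the
guard mechanism (the values below are illustrative and do not come from the
generated clients):

    # fmt: off
    metadata = (
        ("x-goog-request-params", "name=projects/p"),  # hand-aligned on purpose;
        ("grpc-timeout",          "5S"),               # a formatter would collapse it.
    )
    # fmt: on

    print(dict(metadata))

Black and ruff's formatter both honor these markers and leave everything between
them byte-for-byte untouched, which is what the templates rely on wherever the
shape of the emitted block matters more than style.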
diff --git a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
index ac385e285d..30ae97968d 100644
--- a/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
+++ b/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2
@@ -785,7 +785,7 @@ def test_{{ method_name }}_field_headers():
     assert (
         'x-goog-request-params',
         '{% for field_header in method.field_headers -%}
-        {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }}
+        {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim('"') }}
         {%- if not loop.last %}&{% endif %}
         {%- endfor -%}',
     ) in kw['metadata']
diff --git a/gapic/schema/metadata.py b/gapic/schema/metadata.py
index 7df8d0291f..451c84a575 100644
--- a/gapic/schema/metadata.py
+++ b/gapic/schema/metadata.py
@@ -313,7 +313,7 @@ def rel(self, address: "Address") -> str:
         # created, so there is no way for one nested class to reference
         # another at class instantiation time.
         if self.parent and address.parent and self.parent[0] == address.parent[0]:
-            return f"'{'.'.join(self.parent)}.{self.name}'"
+            return f"\"{'.'.join(self.parent)}.{self.name}\""
 
         # Edge case: Similar to above, if this is a message that is
         # referencing a nested message that it contains, we need
@@ -333,7 +333,7 @@ def rel(self, address: "Address") -> str:
             # identifier. It is guaranteed to work all the time because
             # it bumps name resolution until a time when all types in a module
             # are guaranteed to be fully defined.
-            return f"'{'.'.join(self.parent + (self.name,))}'"
+            return f"\"{'.'.join(self.parent + (self.name,))}\""
 
         # Return the usual `module.Name`.
         return str(self)
diff --git a/gapic/schema/wrappers.py b/gapic/schema/wrappers.py
index 17a7832756..1e994e6244 100644
--- a/gapic/schema/wrappers.py
+++ b/gapic/schema/wrappers.py
@@ -309,7 +309,7 @@ def primitive_mock_as_str(self) -> str:
         answer = self.primitive_mock()
 
         if isinstance(answer, str):
-            answer = f"'{answer}'"
+            answer = f'"{answer}"'
         else:
             answer = str(answer)
 
diff --git a/gapic/templates/%namespace/%name/__init__.py.j2 b/gapic/templates/%namespace/%name/__init__.py.j2
index a5e61f219a..eca0018863 100644
--- a/gapic/templates/%namespace/%name/__init__.py.j2
+++ b/gapic/templates/%namespace/%name/__init__.py.j2
@@ -49,26 +49,24 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.'
 #}
 __all__ = (
-{%- filter indent %}
 {% for subpackage, _ in api.subpackages|dictsort %}
-'{{ subpackage }}',
+    "{{ subpackage }}",
 {% endfor %}
 {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %}
-'{{ service.client_name }}',
+    "{{ service.client_name }}",
 {% if 'grpc' in opts.transport %}
-'{{ service.async_client_name }}',
+    "{{ service.async_client_name }}",
 {% endif %}
 {% endfor %}
 {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %}
 {% for message in proto.messages.values()|sort(attribute='name') %}
-'{{ message.name }}',
+    "{{ message.name }}",
 {% endfor %}
 {% for enum in proto.enums.values()|sort(attribute='name') if proto.meta.address.subpackage == api.subpackage_view %}
-'{{ enum.name }}',
+    "{{ enum.name }}",
 {% endfor %}{% endfor %}
-{% endfilter %}
 )
 {% endblock %}
diff --git a/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2
index 9eb968dd01..ad64391c73 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2
@@ -46,11 +46,10 @@ from .types.{{ proto.module_name }} import {{ enum.name }}
 {% endfor %}
 {% endfor %}
 
-if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER
-    {# TODO(api_core): remove `type:ignore` below when minimum version of api_core makes the else clause unnecessary. #}
-    api_core.check_python_version("{{package_path}}") # type: ignore
-    api_core.check_dependency_versions("{{package_path}}") # type: ignore
-else: # pragma: NO COVER
+if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"):  # pragma: NO COVER
+    api_core.check_python_version("{{package_path}}")  # type: ignore
+    api_core.check_dependency_versions("{{package_path}}")  # type: ignore
+else:  # pragma: NO COVER
     {#
     TODO(api_core): Remove this try-catch when we require api-core at a version
     that supports the changes in https://github.com/googleapis/python-api-core/pull/832
@@ -66,20 +65,24 @@ else:  # pragma: NO COVER
         _py_version_str = sys.version.split()[0]
         _package_label = "{{package_path}}"
         if sys.version_info < (3, 9):
-            warnings.warn("You are using a non-supported Python version " +
-                          f"({_py_version_str}). Google will not post any further " +
-                          f"updates to {_package_label} supporting this Python version. " +
-                          "Please upgrade to the latest Python version, or at " +
-                          f"least to Python 3.9, and then update {_package_label}.",
-                          FutureWarning)
+            warnings.warn(
+                "You are using a non-supported Python version " +
+                f"({_py_version_str}). Google will not post any further " +
+                f"updates to {_package_label} supporting this Python version. " +
+                "Please upgrade to the latest Python version, or at " +
+                f"least to Python 3.9, and then update {_package_label}.",
+                FutureWarning,
+            )
         if sys.version_info[:2] == (3, 9):
-            warnings.warn(f"You are using a Python version ({_py_version_str}) " +
-                          f"which Google will stop supporting in {_package_label} in " +
-                          "January 2026. Please " +
-                          "upgrade to the latest Python version, or at " +
-                          "least to Python 3.10, before then, and " +
-                          f"then update {_package_label}.",
-                          FutureWarning)
+            warnings.warn(
+                f"You are using a Python version ({_py_version_str}) " +
+                f"which Google will stop supporting in {_package_label} in " +
+                "January 2026. Please " +
+                "upgrade to the latest Python version, or at " +
+                "least to Python 3.10, before then, and " +
+                f"then update {_package_label}.",
+                FutureWarning,
+            )
 
     def parse_version_to_tuple(version_string: str):
         """Safely converts a semantic version string to a comparable tuple of integers.
@@ -117,51 +120,59 @@ else:  # pragma: NO COVER
                 _recommendation = " (we recommend 6.x)"
             (_version_used, _version_used_string) = _get_version(_dependency_package)
             if _version_used and _version_used < _next_supported_version_tuple:
-                warnings.warn(f"Package {_package_label} depends on " +
-                              f"{_dependency_package}, currently installed at version " +
-                              f"{_version_used_string}. Future updates to " +
-                              f"{_package_label} will require {_dependency_package} at " +
-                              f"version {_next_supported_version} or higher{_recommendation}." +
-                              " Please ensure " +
-                              "that either (a) your Python environment doesn't pin the " +
-                              f"version of {_dependency_package}, so that updates to " +
-                              f"{_package_label} can require the higher version, or " +
-                              "(b) you manually update your Python environment to use at " +
-                              f"least version {_next_supported_version} of " +
-                              f"{_dependency_package}.",
-                              FutureWarning)
+                warnings.warn(
+                    f"Package {_package_label} depends on " +
+                    f"{_dependency_package}, currently installed at version " +
+                    f"{_version_used_string}. Future updates to " +
+                    f"{_package_label} will require {_dependency_package} at " +
+                    f"version {_next_supported_version} or higher{_recommendation}." +
+                    " Please ensure " +
+                    "that either (a) your Python environment doesn't pin the " +
+                    f"version of {_dependency_package}, so that updates to " +
+                    f"{_package_label} can require the higher version, or " +
+                    "(b) you manually update your Python environment to use at " +
+                    f"least version {_next_supported_version} of " +
+                    f"{_dependency_package}.",
+                    FutureWarning,
+                )
     except Exception:
-        warnings.warn("Could not determine the version of Python " +
-                      "currently being used. To continue receiving " +
-                      "updates for {_package_label}, ensure you are " +
-                      "using a supported version of Python; see " +
-                      "https://devguide.python.org/versions/")
+        warnings.warn(
+            "Could not determine the version of Python " +
+            "currently being used. To continue receiving " +
+            "updates for {_package_label}, ensure you are " +
+            "using a supported version of Python; see " +
+            "https://devguide.python.org/versions/"
+        )
 
 {#
 Define __all__. This requires the full set of imported names, so we iterate
 over them again.
 -#}
-__all__ = (
-    {% filter sort_lines -%}
-    {% for subpackage in api.subpackages -%}
-    '{{ subpackage }}',
-    {% endfor -%}
+
+
+{% with imported_names = [] %}
+__all__ = ({% for subpackage in api.subpackages %}
+    {% do imported_names.append(subpackage) %}
+    {% endfor %}
     {% for service in api.services.values()
-        if service.meta.address.subpackage == api.subpackage_view -%}
-    '{{ service.client_name }}',
+        if service.meta.address.subpackage == api.subpackage_view %}
+    {% do imported_names.append(service.client_name) %}
     {% if 'grpc' in opts.transport %}
-    '{{ service.async_client_name }}',
+    {% do imported_names.append(service.async_client_name) %}
     {% endif %}
-    {% endfor -%}
+    {% endfor %}
     {% for proto in api.protos.values()
-        if proto.meta.address.subpackage == api.subpackage_view -%}
-    {% for message in proto.messages.values()|sort(attribute='name') -%}
-    '{{ message.name }}',
-    {% endfor -%}
-    {% for enum in proto.enums.values() -%}
-    '{{ enum.name }}',
-    {% endfor -%}
-    {% endfor -%}
-    {% endfilter %}
+        if proto.meta.address.subpackage == api.subpackage_view %}
+    {% for message in proto.messages.values()|sort(attribute='name') %}
+    {% do imported_names.append(message.name) %}
+    {% endfor %}
+    {% for enum in proto.enums.values() %}
+    {% do imported_names.append(enum.name) %}
+    {% endfor %}
+    {% endfor %}
+    {% for name in imported_names|sort %}
+    "{{ name }}",
+    {% endfor %}
 )
+{% endwith %}
 {% endblock %}
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2
index 7f0b61b144..ecd83e0657 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2
@@ -8,9 +8,9 @@ from .async_client import {{ service.async_client_name }}
 {% endif %}
 
 __all__ = (
-    '{{ service.client_name }}',
+    "{{ service.client_name }}",
 {% if 'grpc' in opts.transport %}
-    '{{ service.async_client_name }}',
+    "{{ service.async_client_name }}",
 {% endif %}
 )
 {% endblock %}
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2
index 60b8f0a7e9..c64bbf4c95 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2
@@ -37,17 +37,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
        self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -89,17 +90,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -144,16 +146,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 {% endif %}
 
 {% if "CancelOperation" in api.mixin_api_methods %}
@@ -194,16 +198,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 {% endif %}
 
 {% if "WaitOperation" in api.mixin_api_methods %}
@@ -247,17 +253,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -369,17 +376,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -487,17 +495,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -543,17 +552,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -598,17 +608,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
@@ -649,17 +660,18 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._client._validate_universe_domain()
 
         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
         # Done; return the response.
         return response
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2
index 9029ac8eca..5a08022608 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2
@@ -17,27 +17,28 @@
 {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %}
 
 {% macro client_method(method, name, snippet_index, api, service, full_extended_lro=False) %}
-    def {{ name }}(self,
-            {% if not method.client_streaming %}
-            request: Optional[Union[{{ method.input.ident }}, dict]] = None,
-            *,
-            {% for field in method.flattened_fields.values() %}
-            {{ field.name }}: Optional[{{ field.ident }}] = None,
-            {% endfor %}
-            {% else %}
-            requests: Optional[Iterator[{{ method.input.ident }}]] = None,
-            *,
-            {% endif %}
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }},
-            {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #}
-            ) -> {{ method.extended_lro.operation_type.ident }}:
-            {% elif not method.server_streaming %}
-            ) -> {{ method.client_output.ident }}:
-            {% else %}
-            ) -> Iterable[{{ method.client_output.ident }}]:
-            {% endif %}
+    def {{ name }}(
+        self,
+        {% if not method.client_streaming %}
+        request: Optional[Union[{{ method.input.ident }}, dict]] = None,
+        *,
+        {% for field in method.flattened_fields.values() %}
+        {{ field.name }}: Optional[{{ field.ident }}] = None,
+        {% endfor %}
+        {% else %}
+        requests: Optional[Iterator[{{ method.input.ident }}]] = None,
+        *,
+        {% endif %}
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }},
+        {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #}
+    ) -> {{ method.extended_lro.operation_type.ident }}:
+        {% elif not method.server_streaming %}
+    ) -> {{ method.client_output.ident }}:
+        {% else %}
+    ) -> Iterable[{{ method.client_output.ident }}]:
+        {% endif %}
         r"""{{ method.meta.doc|rst(width=72, indent=8)|trim }}
 
         {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %}
@@ -93,8 +94,7 @@
         flattened_params = [{{ method.flattened_fields.values()|join(", ", attribute="name") }}]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")
         {% endif %}
 
         {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #}
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2
index 4bfc76e954..06a44e0577 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2
@@ -1,6 +1,7 @@
 {# LRO mixins #}
 {% if api.has_operations_mixin %}
 {% if "ListOperations" in api.mixin_api_methods %}
+
     def list_operations(
         self,
         request: Optional[operations_pb2.ListOperationsRequest] = None,
@@ -35,10 +36,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -46,7 +44,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -55,8 +57,8 @@
             raise e
 {% endif %}
-
 {% if "GetOperation" in api.mixin_api_methods %}
+
     def get_operation(
         self,
         request: Optional[operations_pb2.GetOperationRequest] = None,
@@ -91,10 +93,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -102,7 +101,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -110,8 +113,8 @@
             self._add_cred_info_for_auth_errors(e)
             raise e
 {% endif %}
-
 {% if "DeleteOperation" in api.mixin_api_methods %}
+
     def delete_operation(
         self,
         request: Optional[operations_pb2.DeleteOperationRequest] = None,
@@ -150,19 +153,21 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
 
         # Send the request.
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 {% endif %}
-
 {% if "CancelOperation" in api.mixin_api_methods %}
+
     def cancel_operation(
         self,
         request: Optional[operations_pb2.CancelOperationRequest] = None,
@@ -200,19 +205,21 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
 
         # Send the request.
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 {% endif %}
-
 {% if "WaitOperation" in api.mixin_api_methods %}
+
     def wait_operation(
         self,
         request: Optional[operations_pb2.WaitOperationRequest] = None,
@@ -253,10 +260,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -264,7 +268,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -273,12 +281,11 @@
             raise e
 {% endif %}
 {% endif %} {# LRO #}
-
 {# IAM mixins #}
-
 {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #}
 {% if not opts.add_iam_methods and api.has_iam_mixin %}
 {% if "SetIamPolicy" in api.mixin_api_methods %}
+
     def set_iam_policy(
         self,
         request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
@@ -379,10 +386,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -390,7 +394,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -398,8 +406,8 @@
             self._add_cred_info_for_auth_errors(e)
             raise e
 {% endif %}
-
 {% if "GetIamPolicy" in api.mixin_api_methods %}
+
     def get_iam_policy(
         self,
         request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
@@ -501,10 +509,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -512,7 +517,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -520,8 +529,8 @@
             self._add_cred_info_for_auth_errors(e)
             raise e
 {% endif %}
-
 {% if "TestIamPermissions" in api.mixin_api_methods %}
+
     def test_iam_permissions(
         self,
         request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
@@ -561,10 +570,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("resource", request.resource),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -572,7 +578,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -581,11 +591,10 @@
             raise e
 {% endif %}
 {% endif %}
-
 {# Location mixin #}
-
 {% if api.has_location_mixin %}
 {% if "GetLocation" in api.mixin_api_methods %}
+
     def get_location(
         self,
         request: Optional[locations_pb2.GetLocationRequest] = None,
@@ -620,10 +629,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -631,7 +637,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -639,8 +649,8 @@
             self._add_cred_info_for_auth_errors(e)
             raise e
 {% endif %}
-
 {% if "ListLocations" in api.mixin_api_methods %}
+
     def list_locations(
         self,
         request: Optional[locations_pb2.ListLocationsRequest] = None,
@@ -675,10 +685,7 @@
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -686,7 +693,11 @@
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2
index 6db274e82f..962d1f9dd6 100644
--- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2
+++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2
@@ -28,7 +28,7 @@
 {% if method_settings is not none %}
 {% for auto_populated_field in method_settings.auto_populated_fields %}
 {% if method.input.fields[auto_populated_field].proto3_optional %}
-        if '{{ auto_populated_field }}' not in request:
+        if "{{ auto_populated_field }}" not in request:
 {% else %}
         if not request.{{ auto_populated_field }}:
 {% endif %}
@@ -78,13 +78,13 @@ except ImportError:  # pragma: NO COVER
 {% endif %}{# if not method.client_streaming #}
 
         if header_params:
-            metadata = tuple(metadata) + (
-                gapic_v1.routing_header.to_grpc_metadata(header_params),
-            )
+            metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata(header_params),)
 {% elif method.field_headers %}{# implicit routing #}
         # Certain fields should be provided within the metadata header;
         # add these here.
+        {# Omit formatting for this code block to avoid having to track whether any headers are present #}
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
 {% if not method.client_streaming %}
@@ -94,6 +94,7 @@
 {% endif %}{# not method.client_streaming #}
             )),
         )
+        # fmt: on
 {% endif %}{# method.explicit_routing #}
 {% endmacro %}{# create_metadata #}
 
@@ -105,9 +106,7 @@
 #}
 {% if service_version %}
         if HAS_GOOGLE_API_CORE_VERSION_HEADER:  # pragma: NO COVER
-            metadata = tuple(metadata) + (
-                version_header.to_api_version_header("{{ service_version }}"),
-            )
+            metadata = tuple(metadata) + (version_header.to_api_version_header("{{ service_version }}"),)
 {% endif %}{# service_version #}
 {% endmacro %}
 
@@ -140,14 +139,15 @@ from google.longrunning import operations_pb2  # type: ignore
 @staticmethod
 def _get_http_options():
     http_options: List[Dict[str, str]] = [
-{%- for rule in rules %}{
-            'method': '{{ rule.method }}',
-            'uri': '{{ rule.uri }}',
-            {% if rule.body %}
-            'body': '{{ rule.body }}',
-            {% endif %}{# rule.body #}
-        },
-        {% endfor %}{# rule in rules #}
+        {% for rule in rules %}
+        {
+            "method": "{{ rule.method }}",
+            "uri": "{{ rule.uri }}",
+            {% if rule.body %}
+            "body": "{{ rule.body }}",
+            {% endif %}{# rule.body #}
+        },
+        {% endfor %}{# rule in rules #}
     ]
     return http_options
 {% endmacro %}
@@ -163,12 +163,12 @@ def _get_http_options():
         session,
         timeout,
         transcoded_request,
-        body=None):
-
-    uri = transcoded_request['uri']
-    method = transcoded_request['method']
+        body=None,
+):
+    uri = transcoded_request["uri"]
+    method = transcoded_request["method"]
     headers = dict(metadata)
-    headers['Content-Type'] = 'application/json'
+    headers["Content-Type"] = "application/json"
     response = {{ await_prefix }}getattr(session, method)(
         "{host}{uri}".format(host=host, uri=uri),
         timeout=timeout,
@@ -185,7 +185,7 @@
 #}
         stream=True,
         {% endif %}
-        )
+    )
     return response
 {% endmacro %}
 
@@ -215,8 +215,8 @@
     query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request)
 
     if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-        request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-        method = transcoded_request['method']
+        request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+        method = transcoded_request["method"]
         try:
             request_payload = {% if is_proto_plus_type %}type(request).to_json(request){% else %}json_format.MessageToJson(request){% endif %}
 
@@ -224,14 +224,14 @@
 {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2282): Remove try/except and correctly parse request payload. #}
             request_payload = None
         http_request = {
-          "payload": request_payload,
-          "requestMethod": method,
-          "requestUrl": request_url,
-          "headers": dict(metadata),
+            "payload": request_payload,
+            "requestMethod": method,
+            "requestUrl": request_url,
+            "headers": dict(metadata),
         }
         _LOGGER.debug(
             f"Sending request for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method_name }}",
-            extra = {
+            extra={
                 "serviceName": "{{ service.meta.address.proto }}",
                 "rpcName": "{{ method_name }}",
                 "httpRequest": http_request,
@@ -241,7 +241,17 @@
         )
 
     # Send the request
-    response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %})
+    response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(
+        self._host,
+        metadata,
+        query_params,
+        self._session,
+        timeout,
+        transcoded_request,
+        {% if body_spec %}
+        body,
+        {% endif %}
+    )
 
     # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
     # subclass.
@@ -252,9 +262,9 @@
 See issue: https://github.com/googleapis/gapic-generator-python/issues/2116.
 #}
     {% if is_async %}
    content = await response.read()
-    payload = json.loads(content.decode('utf-8'))
-    request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-    method = transcoded_request['method']
+    payload = json.loads(content.decode("utf-8"))
+    request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+    method = transcoded_request["method"]
 {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core.
 #}
     raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore
     {% else %}
@@ -278,10 +288,13 @@
         else:
             request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
 
+        {# Turn off code formatting for this block for readability #}
+        # fmt: off
         request_metadata = {
             key: value.decode("utf-8") if isinstance(value, bytes) else value
             for key, value in request_metadata
         }
+        # fmt: on
         grpc_request = {
             "payload": request_payload,
             "requestMethod": "grpc",
@@ -289,7 +302,7 @@
         }
         _LOGGER.debug(
             f"Sending request for {client_call_details.method}",
-            extra = {
+            extra={
                 "serviceName": "{{ service.meta.address.proto }}",
                 "rpcName": str(client_call_details.method),
                 "request": grpc_request,
@@ -302,7 +315,7 @@
 
 {% macro prep_wrapped_messages_async_method(api, service) %}
     def _prep_wrapped_messages(self, client_info):
-        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
         self._wrapped_methods = {
             {% for method in service.methods.values() %}
             self.{{ method.transport_safe_name|snake_case }}: self._wrap_method(
@@ -401,9 +414,14 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor:
 
     """
 
+
     {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %}
     {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. #}
-    {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.input.ident}}, {{ client_method_metadata_type() }}]:
+    {{ async_prefix }}def pre_{{ method.name|snake_case }}(
+        self,
+        request: {{method.input.ident}},
+        {{ client_method_metadata_argument() }},
+    ) -> Tuple[{{method.input.ident}}, {{ client_method_metadata_type() }}]:
         """Pre-rpc interceptor for {{ method.name|snake_case }}
 
         Override in a subclass to manipulate the request or metadata
@@ -413,9 +431,15 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor:
 
     {% if not method.void %}
     {% if not method.server_streaming %}
-    {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}:
+    {{ async_prefix }}def post_{{ method.name|snake_case }}(
+        self,
+        response: {{method.output.ident}},
+    ) -> {{method.output.ident}}:
     {% else %}
-    {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator:
+    {{ async_prefix }}def post_{{ method.name|snake_case }}(
+        self,
+        response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator,
+    ) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator:
     {% endif %}
         """Post-rpc interceptor for {{ method.name|snake_case }}
 
@@ -430,9 +454,17 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor:
         return response
 
     {% if not method.server_streaming %}
-    {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: {{method.output.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.output.ident}}, {{ client_method_metadata_type() }}]:
+    {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(
+        self,
+        response: {{method.output.ident}},
+        {{ client_method_metadata_argument() }},
+    ) -> Tuple[{{method.output.ident}}, {{ client_method_metadata_type() }}]:
     {% else %}
-    {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_argument() }}) -> Tuple[rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_type() }}]:
+    {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(
+        self,
+        response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator,
+        {{ client_method_metadata_argument() }},
+    ) -> Tuple[rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_type() }}]:
     {% endif %}
         """Post-rpc interceptor for {{ method.name|snake_case }}
 
@@ -453,7 +485,9 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor:
 
     {% for name, signature in api.mixin_api_signatures.items() %}
     {{ async_prefix }}def pre_{{ name|snake_case }}(
-        self, request: {{signature.request_type}}, {{ client_method_metadata_argument() }}
+        self,
+        request: {{signature.request_type}},
+        {{ client_method_metadata_argument() }},
     ) -> Tuple[{{signature.request_type}}, {{ client_method_metadata_type() }}]:
         """Pre-rpc interceptor for {{ name|snake_case }}
 
@@ -463,7 +497,8 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor:
         return request, metadata
 
     {{ async_prefix }}def post_{{ name|snake_case }}(
-        self, response: {{signature.response_type}}
+        self,
+        response: {{signature.response_type}},
     ) -> {{signature.response_type}}:
         """Post-rpc interceptor for {{ name|snake_case }}
 
@@ -488,7 +523,7 @@
 
     @property
     def {{ name|snake_case }}(self):
-        return self.{{ name|make_private }}(self._session, self._host, self._interceptor) # type: ignore
+        return self.{{ name|make_private }}(self._session, self._host, self._interceptor)  # type: ignore
 
     class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub):
         def __hash__(self):
@@ -497,16 +532,15 @@ class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}}
         {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %}
         {{ response_method(body_spec, is_async=is_async, is_streaming_method=None) | indent(4) }}
 
-        {{ async_prefix }}def __call__(self,
-            request: {{ sig.request_type }}, *,
-            retry: OptionalRetry=gapic_v1.method.DEFAULT,
-            timeout: Optional[float]=None,
-            {{ client_method_metadata_argument()|indent(8) }}={{ client_method_metadata_default_value() }},
-            ) -> {{ sig.response_type }}:
-
-            r"""Call the {{- ' ' -}}
-            {{ (name|snake_case).replace('_',' ')|wrap(width=70, offset=45, indent=8) }}
-            {{- ' ' -}} method over HTTP.
+        {{ async_prefix }}def __call__(
+            self,
+            request: {{ sig.request_type }},
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            {{ client_method_metadata_argument()|indent(8) }} = {{ client_method_metadata_default_value() }},
+        ) -> {{ sig.response_type }}:
+            r"""Call the {{- ' ' -}}{{ (name|snake_case).replace('_',' ') }}{{- ' ' -}} method over HTTP.
Args: request ({{ sig.request_type }}): @@ -542,12 +576,12 @@ class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}} response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ name }}", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "rpcName": "{{ name }}", "httpResponse": http_response, diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 689d3e0620..3c45510595 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -7,7 +7,25 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + {% if service.any_server_streaming %} + AsyncIterable, + Awaitable, + {% endif %} + {% if service.any_client_streaming %} + AsyncIterator, + {% endif %} + Sequence, + Tuple, + Type, + Union, +) {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} import uuid {% endif %} @@ -22,8 +40,8 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf {{ shared_macros.add_google_api_core_version_header_import(service.version) }} @@ -44,10 +62,10 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -56,12 +74,14 @@ from .client import {{ service.client_name }} try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + {# TODO(yon-mg): handle rest transport async client interaction #} class {{ service.async_client_name }}: """{{ service.meta.doc|rst(width=72, indent=4) }}{% if service.version|length %} @@ -182,12 +202,14 @@ class {{ service.async_client_name }}: get_transport_class = {{ service.client_name }}.get_transport_class - def __init__(self, *, - 
credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the {{ (service.async_client_name|snake_case).replace("_", " ") }}. Args: @@ -249,45 +271,47 @@ class {{ service.async_client_name }}: transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}`.", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "{{ service.meta.address.proto }}", "credentialsType": None, - } + }, ) {% for method in service.methods.values() %} {% with method_name = method.client_method_name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} - {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, + {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}( + self, {% endwith %} - {% if not method.client_streaming %} - request: Optional[Union[{{ method.input.ident }}, dict]] = None, - *, - {% for field in method.flattened_fields.values() %} - {{ field.name }}: Optional[{{ field.ident }}] = None, - {% endfor %} - {% else %} - requests: Optional[AsyncIterator[{{ method.input.ident }}]] = None, - *, - {% endif %} - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, - {% if not method.server_streaming %} - ) -> {{ method.client_output_async.ident }}: - {% else %} - ) -> Awaitable[AsyncIterable[{{ method.client_output_async.ident }}]]: - {% endif %} + {% if not method.client_streaming %} + request: Optional[Union[{{ method.input.ident }}, dict]] = None, + *, + {% for field in method.flattened_fields.values() %} + {{ field.name }}: Optional[{{ field.ident }}] = None, + {% endfor %} + {% else %} + requests: Optional[AsyncIterator[{{ method.input.ident }}]] = None, + *, + {% endif %} + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, + {% if not method.server_streaming %} + ) -> {{ method.client_output_async.ident }}: + {% else %} + ) -> Awaitable[AsyncIterable[{{ 
method.client_output_async.ident }}]]: + {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8)|trim }} {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=False) %} @@ -343,8 +367,7 @@ class {{ service.async_client_name }}: flattened_params = [{{ method.flattened_fields.values()|join(", ", attribute="name") }}] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") {% endif %} {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} @@ -539,15 +562,18 @@ class {{ service.async_client_name }}: # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -653,15 +679,18 @@ class {{ service.async_client_name }}: # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -705,15 +734,18 @@ class {{ service.async_client_name }}: # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
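The guard collapsed onto one line above enforces that callers pass either a full request object or flattened field arguments, never both. Roughly, under hypothetical names (`get_widget`, `name`):

    from typing import Optional

    def get_widget(request: Optional[dict] = None, *, name: Optional[str] = None) -> dict:
        flattened_params = [name]
        has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")
        # Otherwise, build the request from the flattened arguments.
        return request if request is not None else {"name": name}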
return response @@ -725,13 +757,13 @@ class {{ service.async_client_name }}: async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "{{ service.async_client_name }}", -) +__all__ = ("{{ service.async_client_name }}",) + {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0d60f47bb6..6c6acec71c 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -15,7 +15,25 @@ import json import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + {% if service.any_server_streaming %} + Iterable, + {% endif %} + {% if service.any_client_streaming %} + Iterator, + {% endif %} + Sequence, + Tuple, + Type, + Union, + cast, +) {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} import uuid {% endif %} @@ -31,11 +49,11 @@ from google.api_core import extended_operation {% endif %} from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf {{ shared_macros.add_google_api_core_version_header_import(service.version) }} try: @@ -45,6 +63,7 @@ except AttributeError: # pragma: NO COVER try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -63,10 +82,10 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -78,11 +97,13 @@ from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from 
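The module-level `DEFAULT_CLIENT_INFO` pattern shown here guards an attribute that only newer google-api-core releases define; a self-contained sketch (the version string is a placeholder):

    from google.api_core import gapic_v1
    import google.protobuf

    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version="0.0.0")
    # Older google-api-core releases lack this attribute, hence the guard.
    if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):
        DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__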
.transports.rest import {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} + try: from .transports.rest_asyncio import Async{{ service.name }}RestTransport + HAS_ASYNC_REST_DEPENDENCIES = True {# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} -except ImportError as e: # pragma: NO COVER +except ImportError as e: # pragma: NO COVER HAS_ASYNC_REST_DEPENDENCIES = False ASYNC_REST_EXCEPTION = e @@ -97,6 +118,7 @@ class {{ service.client_name }}Meta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] {% if "grpc" in opts.transport %} _transport_registry["grpc"] = {{ service.grpc_transport_name }} @@ -111,9 +133,10 @@ class {{ service.client_name }}Meta(type): {% endif %}{# if rest_async_io_enabled #} {% endif %} - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[{{ service.name }}Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[{{ service.name }}Transport]: """Returns an appropriate transport class. Args: @@ -156,9 +179,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -167,7 +188,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") @@ -175,9 +197,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = {% if service.host %}"{{ service.host.replace("googleapis.com", "{UNIVERSE_DOMAIN}") }}"{% else %}None{% endif %} @@ -192,21 +212,19 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. 
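The metaclass registry reformatted above is what lets `get_transport_class(...)` resolve a transport by label, falling back to the first registered one. A stripped-down sketch with hypothetical transport classes:

    from collections import OrderedDict
    from typing import Dict, Optional, Type

    class Transport: ...
    class GrpcTransport(Transport): ...
    class RestTransport(Transport): ...

    class ClientMeta(type):
        _transport_registry = OrderedDict()  # type: Dict[str, Type[Transport]]
        _transport_registry["grpc"] = GrpcTransport
        _transport_registry["rest"] = RestTransport

        def get_transport_class(cls, label: Optional[str] = None) -> Type[Transport]:
            if label:
                return cls._transport_registry[label]
            # No label given: fall back to the first registered transport.
            return next(iter(cls._transport_registry.values()))

    class Client(metaclass=ClientMeta):
        pass

    assert Client.get_transport_class("rest") is RestTransport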
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -241,7 +259,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ service.client_name }}: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -260,13 +279,29 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod - def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: + {% if message.resource_path_args|length > 0 %} + def {{ message.resource_type|snake_case }}_path( + {% for arg in message.resource_path_args %} + {{ arg }}: str, + {% endfor %} + ) -> str: """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" - return "{{ message.resource_path_formatted }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + return "{{ message.resource_path_formatted }}".format( + {% for arg in message.resource_path_args %} + {{ arg }}={{ arg }}, + {% endfor %} + ) + {% else %} + def {{ message.resource_type|snake_case }}_path() -> str: + """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" + return "{{ message.resource_path_formatted }}".format() + {% endif %} @staticmethod - def parse_{{ message.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + def parse_{{ message.resource_type|snake_case }}_path( + path: str, + ) -> Dict[str, str]: """Parses a {{ message.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} @@ -274,12 +309,28 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %}{# resources #} {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} @staticmethod - def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: + {% if resource_msg.message_type.resource_path_args|length > 0 %} + def common_{{ resource_msg.message_type.resource_type|snake_case }}_path( + {% for arg in resource_msg.message_type.resource_path_args %} + {{ arg }}: str, + {% endfor %} + ) -> str: """Returns a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" - return "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + return "{{ resource_msg.message_type.resource_path }}".format( + {% for arg in resource_msg.message_type.resource_path_args %} + {{ arg }}={{ arg }}, + 
{% endfor %} + ) + {% else %} + def common_{{ resource_msg.message_type.resource_type|snake_case }}_path() -> str: + """Returns a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" + return "{{ resource_msg.message_type.resource_path }}".format() + {% endif %} @staticmethod - def parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + def parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path( + path: str, + ) -> Dict[str, str]: """Parse a {{ resource_msg.message_type.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) return m.groupdict() if m else {} @@ -287,7 +338,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %}{# common resources #} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -319,8 +373,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = {{ service.client_name }}._use_client_cert_effective() @@ -450,7 +506,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -490,12 +546,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the {{ (service.client_name|snake_case).replace('_', ' ') }}. 
Args: @@ -558,12 +616,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = {{ service.client_name }}._read_environment_variables() self._client_cert_source = {{ service.client_name }}._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = {{ service.client_name }}._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -583,22 +641,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if transport_provided: # transport is a {{ service.name }}Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast({{ service.name }}Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - {{ service.client_name }}._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or {{ service.client_name }}._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
#} @@ -617,7 +675,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, - } provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] if provided_unsupported_params: @@ -663,15 +720,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}`.", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "{{ service.meta.address.proto }}", "credentialsType": None, - } + }, ) @@ -697,11 +756,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): and may cause errors in other clients! """ self.transport.close() - {% include '%namespace/%name_%version/%sub/services/%service/_mixins.py.j2' %} - {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} {% if opts.add_iam_methods %} + def set_iam_policy( self, request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, @@ -806,10 +864,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -817,7 +872,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -930,10 +989,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -941,7 +997,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
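The routing-header hunks collapsed above all build the same metadata entry; it travels as `x-goog-request-params` so the backend can route on the targeted resource. A small sketch with a placeholder resource name:

    from google.api_core import gapic_v1

    metadata = ()
    resource = "projects/p/topics/t"  # placeholder
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((("resource", resource),)),
    )
    # metadata now ends with an ("x-goog-request-params", ...) pair whose
    # value URL-encodes the resource key/value.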
return response @@ -992,10 +1052,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1003,7 +1060,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1012,12 +1073,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise e {% endif %} + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "{{ service.client_name }}", -) +__all__ = ("{{ service.client_name }}",) {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 0d7b1dad8b..aa26001fff 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -12,6 +12,7 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -48,14 +49,17 @@ class {{ method.name }}Pager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., {{ method.output.ident }}], - request: {{ method.input.ident }}, - response: {{ method.output.ident }}, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): + + def __init__( + self, + method: Callable[..., {{ method.output.ident }}], + request: {{ method.input.ident }}, + response: {{ method.output.ident }}, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }} + ): """Instantiate the pager. Args: @@ -102,7 +106,7 @@ class {{ method.name }}Pager: {% endif %} def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) {# TODO(yon-mg): remove on rest async transport impl #} @@ -124,14 +128,17 @@ class {{ method.name }}AsyncPager: attributes are available on the pager. 
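Callers rarely see the pager internals reformatted here; from the outside a pager is just an iterable. A usage sketch with a hypothetical `client` and `list_widgets` method:

    def dump_widgets(client, parent: str) -> None:
        # Iterating the pager yields individual items, fetching further
        # pages on demand as next_page_token values come back.
        for widget in client.list_widgets(parent=parent):
            print(widget.name)

        # Alternatively, walk whole responses via the pages property.
        for page in client.list_widgets(parent=parent).pages:
            print(len(page.widgets))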
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[{{ method.output.ident }}]], - request: {{ method.input.ident }}, - response: {{ method.output.ident }}, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): + + def __init__( + self, + method: Callable[..., Awaitable[{{ method.output.ident }}]], + request: {{ method.input.ident }}, + response: {{ method.output.ident }}, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }} + ): """Instantiates the pager. Args: @@ -163,6 +170,7 @@ class {{ method.name }}AsyncPager: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + {% if method.paged_result_field.map %} def __aiter__(self) -> Iterator[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: async def async_generator(): @@ -186,7 +194,7 @@ class {{ method.name }}AsyncPager: {% endif %} def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) {% endif %} {% endfor %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 9745b08d78..5bcdc4c9b1 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -16,11 +16,13 @@ from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport from .rest import {{ service.name }}RestTransport from .rest import {{ service.name }}RestInterceptor {% if rest_async_io_enabled %} + ASYNC_REST_CLASSES: Tuple[str, ...] try: from .rest_asyncio import Async{{ service.name }}RestTransport from .rest_asyncio import Async{{ service.name }}RestInterceptor - ASYNC_REST_CLASSES = ('Async{{ service.name }}RestTransport', 'Async{{ service.name }}RestInterceptor') + + ASYNC_REST_CLASSES = ("Async{{ service.name }}RestTransport", "Async{{ service.name }}RestInterceptor") HAS_REST_ASYNC = True except ImportError: # pragma: NO COVER ASYNC_REST_CLASSES = () @@ -33,26 +35,26 @@ except ImportError: # pragma: NO COVER # Compile a registry of transports. 
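The page-advance loop visible in this hunk is the heart of every async pager: re-issue the request with the previous response's token until it comes back empty. Reduced to its essentials (request/response message types are assumed to carry `page_token` and `next_page_token`):

    class AsyncPager:
        def __init__(self, method, request, response):
            self._method = method      # awaitable RPC
            self._request = request    # has a page_token field
            self._response = response  # has a next_page_token field

        @property
        async def pages(self):
            yield self._response
            while self._response.next_page_token:
                # Advance the cursor and refetch.
                self._request.page_token = self._response.next_page_token
                self._response = await self._method(self._request)
                yield self._response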
_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] {% if 'grpc' in opts.transport %} -_transport_registry['grpc'] = {{ service.name }}GrpcTransport -_transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport +_transport_registry["grpc"] = {{ service.name }}GrpcTransport +_transport_registry["grpc_asyncio"] = {{ service.name }}GrpcAsyncIOTransport {% endif %} {% if 'rest' in opts.transport %} -_transport_registry['rest'] = {{ service.name }}RestTransport +_transport_registry["rest"] = {{ service.name }}RestTransport {% if rest_async_io_enabled %} if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry['rest_asyncio'] = Async{{ service.name }}RestTransport + _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport {% endif %}{# if rest_async_io_enabled #} {% endif %} __all__ = ( - '{{ service.name }}Transport', + "{{ service.name }}Transport", {% if 'grpc' in opts.transport %} - '{{ service.name }}GrpcTransport', - '{{ service.name }}GrpcAsyncIOTransport', + "{{ service.name }}GrpcTransport", + "{{ service.name }}GrpcAsyncIOTransport", {% endif %} {% if 'rest' in opts.transport %} - '{{ service.name }}RestTransport', - '{{ service.name }}RestInterceptor', + "{{ service.name }}RestTransport", + "{{ service.name }}RestInterceptor", {% endif %} ){% if 'rest' in opts.transport and rest_async_io_enabled%} + ASYNC_REST_CLASSES{%endif%} {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index 3458cc78e8..229f8688c5 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -25,8 +25,7 @@ def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -45,8 +44,7 @@ def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -65,8 +63,7 @@ def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -85,8 +82,7 @@ def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -105,8 +101,7 @@ def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -129,8 +124,7 @@ def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -149,8 +143,7 @@ def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the get location method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -228,9 +221,7 @@ @property def test_iam_permissions( self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: + ) -> Callable[[iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control policy for a function. If the function does not exist, this will diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 index 16cc77ea93..43cb7a4112 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -29,22 +29,24 @@ def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} - {%- if body_spec %} + {% if body_spec %} @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body - {%- endif %} {# body_spec #} + {% endif %} {# body_spec #} @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params {% endfor %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 7cdaf82da5..b597d592f3 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -17,7 +17,7 @@ from
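`_get_transcoded_request` above delegates to google-api-core's transcoder, which maps a proto message onto an HTTP rule. A runnable sketch using the operations mixin's GetOperation rule (the URI shown is illustrative):

    from google.api_core import path_template
    from google.longrunning import operations_pb2
    from google.protobuf import json_format

    http_options = [{"method": "get", "uri": "/v1/{name=operations/**}"}]
    request = operations_pb2.GetOperationRequest(name="operations/op-123")

    # MessageToDict then transcode splits the call into method/uri/query_params.
    request_kwargs = json_format.MessageToDict(request)
    transcoded = path_template.transcode(http_options, **request_kwargs)
    print(transcoded["method"])  # get
    print(transcoded["uri"])     # /v1/operations/op-123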
google.api_core import retry as retries from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf {% filter sort_lines %} @@ -35,13 +35,13 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} {% set import_ns.has_operations_mixin = True %} {% endif %} {% if import_ns.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} {% filter sort_lines %} @@ -59,31 +59,35 @@ if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" + {# Omit formatting for this code block to avoid keeping track of whether this dict is empty #} + # fmt: off AUTH_SCOPES = ( {% for scope in service.oauth_scopes %} - '{{ scope }}', + "{{ scope }}", {% endfor %} ) + # fmt: on - DEFAULT_HOST: str = '{% if service.host %}{{ service.host }}{% endif %}' + DEFAULT_HOST: str = "{% if service.host %}{{ service.host }}{% endif %}" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -122,10 +126,10 @@ class {{ service.name }}Transport(abc.ABC): if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. 
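The credential plumbing reindented in this hunk follows a fixed precedence: explicit credentials file, then explicit credentials, then Application Default Credentials. Approximately (the default scope value is a placeholder):

    import google.auth

    def resolve_credentials(credentials=None, credentials_file=None, scopes=None, quota_project_id=None):
        scopes_kwargs = {"scopes": scopes, "default_scopes": ("https://www.googleapis.com/auth/cloud-platform",)}
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            # Fall back to Application Default Credentials.
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
        return credentials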
@@ -133,15 +137,19 @@ class {{ service.name }}Transport(abc.ABC): credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -183,13 +191,13 @@ class {{ service.name }}Transport(abc.ABC): default_timeout=None, client_info=client_info, ), - {% endfor %} {# method_name in api.mixin_api_methods.keys() #} + {% endfor %}{# method_name in api.mixin_api_methods.keys() #} } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ @@ -204,14 +212,20 @@ class {{ service.name }}Transport(abc.ABC): {% endif %} {% for method in service.methods.values() %} + {# Turn off code formatting here to avoid calculating line length to determine whether to wrap code #} + # fmt: off @property - def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ - [{{ method.input.ident }}], - Union[ - {{ method.output.ident }}, - Awaitable[{{ method.output.ident }}] - ]]: + def {{ method.transport_safe_name|snake_case }}( + self, + ) -> Callable[ + [{{ method.input.ident }}], + Union[ + {{ method.output.ident }}, + Awaitable[{{ method.output.ident }}] + ], + ]: raise NotImplementedError() + # fmt: on {% endfor %} {% if api.has_operations_mixin %} @@ -230,10 +244,7 @@ class {{ service.name }}Transport(abc.ABC): @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() {% endif %} @@ -241,10 +252,7 @@ class {{ service.name }}Transport(abc.ABC): @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() {% endif %} @@ -252,10 +260,7 @@ class {{ service.name }}Transport(abc.ABC): @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: raise NotImplementedError() {% endif %} @@ -263,10 +268,7 @@ class {{ service.name }}Transport(abc.ABC): @property def wait_operation( self, - ) -> Callable[ - [operations_pb2.WaitOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.WaitOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() {% endif %} {% endif %} @@ -277,10 +279,7 @@ class {{ service.name }}Transport(abc.ABC): @property def set_iam_policy( self, - ) -> Callable[ - 
[iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() {% endif %} @@ -288,10 +287,7 @@ class {{ service.name }}Transport(abc.ABC): @property def get_iam_policy( self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() {% endif %} @@ -313,21 +309,17 @@ class {{ service.name }}Transport(abc.ABC): {% if api.has_location_mixin %} {% if "GetLocation" in api.mixin_api_methods %} @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]]]: raise NotImplementedError() {% endif %} {% if "ListLocations" in api.mixin_api_methods %} @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]]]: raise NotImplementedError() {% endif %} {% endif %} @@ -337,31 +329,19 @@ class {{ service.name }}Transport(abc.ABC): @property def set_iam_policy( self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: + ) -> Callable[[iam_policy_pb2.TestIamPermissionsRequest], Union[iam_policy_pb2.TestIamPermissionsResponse, Awaitable[iam_policy_pb2.TestIamPermissionsResponse]]]: raise NotImplementedError() {% endif %} @@ -385,7 +365,5 @@ class {{ service.name }}Transport(abc.ABC): {% endfor %} -__all__ = ( - '{{ service.name }}Transport', -) +__all__ = ("{{ service.name }}Transport",) {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 23e7e19839..af5ed8f314 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -15,7 +15,7 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 {% endif %} from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # 
type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -39,19 +39,20 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} {% set import_ns.has_operations_mixin = True %} {% endif %} {% if import_ns.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -84,7 +85,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "rpcName": client_call_details.method, "response": grpc_response, @@ -107,28 +108,31 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -205,7 +209,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -214,7 +219,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -249,20 +255,22 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -298,13 +306,12 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel {% if service.has_lro %} @@ -318,9 +325,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. 
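`client_cert_source` in these hunks is a zero-argument callable yielding a (cert, key) pair, which feeds straight into grpc's channel credentials; a minimal sketch:

    import grpc

    def make_mtls_channel_credentials(client_cert_source):
        # client_cert_source() returns (certificate_chain, private_key) bytes.
        cert, key = client_cert_source()
        return grpc.ssl_channel_credentials(
            certificate_chain=cert,
            private_key=key,
        )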
return self._operations_client @@ -328,13 +333,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {% for method in service.methods.values() %} @property - def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ - [{{ method.input.ident }}], - {{ method.output.ident }}]: - r"""Return a callable for the{{ ' ' }} + def {{ method.transport_safe_name|snake_case }}( + self, + ) -> Callable[[{{ method.input.ident }}], {{ method.output.ident }}]: + r"""Return a callable for the{{ " " }} {{- (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) -}} - {{ ' ' }}method over gRPC. + {{ " " }}method over gRPC. {{ method.meta.doc|rst(width=72, indent=8) }} @@ -348,13 +353,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( - '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + if "{{ method.transport_safe_name|snake_case }}" not in self._stubs: + self._stubs["{{ method.transport_safe_name|snake_case }}"] = self._logged_channel.{{ method.grpc_stub_type }}( + "/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}", request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) - return self._stubs['{{ method.transport_safe_name|snake_case }}'] + return self._stubs["{{ method.transport_safe_name|snake_case }}"] {% endfor %} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} @@ -414,7 +419,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def test_iam_permissions( self, ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. 
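The stub properties reformatted above memoize per RPC name so each serializer pair is built only once per channel. Shape-wise (service/method names and the identity serializers are placeholders):

    class ExampleGrpcTransport:
        def __init__(self, channel):
            self._logged_channel = channel
            self._stubs = {}

        @property
        def get_widget(self):
            # Lazily create the stub on first access, then reuse it.
            if "get_widget" not in self._stubs:
                self._stubs["get_widget"] = self._logged_channel.unary_unary(
                    "/google.example.v1.WidgetService/GetWidget",
                    request_serializer=lambda msg: msg,     # placeholder
                    response_deserializer=lambda raw: raw,  # placeholder
                )
            return self._stubs["get_widget"]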
Tests the specified permissions against the IAM access control @@ -450,7 +456,5 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return "grpc" -__all__ = ( - '{{ service.name }}GrpcTransport', -) +__all__ = ("{{ service.name }}GrpcTransport",) {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index d5f692442f..6792164ac8 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -19,13 +19,13 @@ from google.api_core import retry_async as retries {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore {% filter sort_lines %} @@ -43,13 +43,13 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} {% set import_ns.has_operations_mixin = True %} {% endif %} {% if import_ns.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -57,6 +57,7 @@ from .grpc import {{ service.name }}GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -89,7 +90,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pra } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -117,13 +118,15 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. 
@@ -154,29 +157,31 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -253,7 +258,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -262,7 +268,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -323,9 +330,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. 
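The "Quick check: Only create a new client if we do not already have one" block reformatted here is a lazy-singleton property: the operations client is expensive, so it is built on first access and cached on the instance. A sketch under that assumption, with a plain `object()` standing in for `operations_v1.OperationsAsyncClient(channel)`:

    class LroTransport:
        """Sketch of the lazily created operations client."""

        def __init__(self):
            self._operations_client = None  # not built until first use

        @property
        def operations_client(self):
            # Quick check: build the helper only on first access; every
            # later access returns the cached instance.
            if self._operations_client is None:
                self._operations_client = object()  # stand-in for the real client
            return self._operations_client

    t = LroTransport()
    assert t.operations_client is t.operations_client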
return self._operations_client @@ -333,13 +338,13 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): {% for method in service.methods.values() %} @property - def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ - [{{ method.input.ident }}], - Awaitable[{{ method.output.ident }}]]: - r"""Return a callable for the{{ ' ' }} + def {{ method.transport_safe_name|snake_case }}( + self, + ) -> Callable[[{{ method.input.ident }}], Awaitable[{{ method.output.ident }}]]: + r"""Return a callable for the{{ " " }} {{- (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) -}} - {{ ' ' }}method over gRPC. + {{ " " }}method over gRPC. {{ method.meta.doc|rst(width=72, indent=8) }} @@ -353,13 +358,13 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( - '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + if "{{ method.transport_safe_name|snake_case }}" not in self._stubs: + self._stubs["{{ method.transport_safe_name|snake_case }}"] = self._logged_channel.{{ method.grpc_stub_type }}( + "/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}", request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) - return self._stubs['{{ method.transport_safe_name|snake_case }}'] + return self._stubs["{{ method.transport_safe_name|snake_case }}"] {% endfor %} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} @@ -459,7 +464,5 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} -__all__ = ( - '{{ service.name }}GrpcAsyncIOTransport', -) +__all__ = ("{{ service.name }}GrpcAsyncIOTransport",) {% endblock %} \ No newline at end of file diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 95efafb389..7e2b2a8c17 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -24,7 +24,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} from requests import __version__ as requests_version @@ -44,6 +44,7 @@ except AttributeError: # pragma: NO COVER try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -83,31 +84,32 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): """ {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} - def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[{{ service.name }}RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[{{ service.name }}RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - {% if not opts.rest_numeric_enums %} - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - {% endif %} + {% if not opts.rest_numeric_enums %} + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + {% endif %} Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. 
+ {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -146,10 +148,9 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) {% if service.has_lro %} self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None {% endif %} @@ -172,13 +173,13 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): http_options: Dict[str, List[Dict[str, str]]] = { {% for selector, rules in api.http_options.items() %} {% if selector.startswith('google.longrunning.Operations') %} - '{{ selector }}': [ + "{{ selector }}": [ {% for rule in rules %} { - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', + "method": "{{ rule.method }}", + "uri": "{{ rule.uri }}", {% if rule.body %} - 'body': '{{ rule.body }}', + "body": "{{ rule.body }}", {% endif %}{# rule.body #} }, {% endfor %}{# rules #} @@ -188,12 +189,13 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="{{ service.client_package_version }}") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="{{ service.client_package_version }}", + ) self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -212,21 +214,20 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {{ shared_macros.response_method(body_spec, is_async=False, is_streaming_method=method.server_streaming)|indent(8) }} {% endif %}{# method.http_options and not method.client_streaming #} - def __call__(self, - request: {{method.input.ident}}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - {{ shared_macros.client_method_metadata_argument()|indent(8) }}={{ shared_macros.client_method_metadata_default_value() }}, - ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: - {% if method.http_options and not method.client_streaming %} - r"""Call the {{- ' ' -}} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. 
+ def __call__( + self, + request: {{method.input.ident}}, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: + {% if method.http_options and not method.client_streaming %} + r"""Call the {{- " " -}}{{ (method.name|snake_case).replace('_',' ') }}{{- " " -}}method over HTTP. Args: request (~.{{ method.input.ident }}): - The request object.{{ ' ' }} + The request object.{{ " " }} {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -277,16 +278,16 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): response_payload = None {% endif %}{# if not method.server_streaming #} http_response = { - {# Not logging response payload for server streaming here. See comment above. #} - {% if not method.server_streaming %} - "payload": response_payload, - {% endif %}{# if not method.server_streaming #} - "headers": dict(response.headers), - "status": response.status_code, + {# Not logging response payload for server streaming here. See comment above. #} + {% if not method.server_streaming %} + "payload": response_payload, + {% endif %}{# if not method.server_streaming #} + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method.transport_safe_name|snake_case }}", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "rpcName": "{{ method.name }}", {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} @@ -297,7 +298,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): return resp {% endif %}{# method.void #} - {% else %}{# method.http_options and not method.client_streaming #} + {% else %}{# method.http_options and not method.client_streaming #} raise NotImplementedError( "Method {{ method.name }} is not available over REST transport" ) @@ -305,14 +306,16 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endfor %} {% for method in service.methods.values()|sort(attribute="name") %} + {# Turn off code formatting here to avoid calculating line length to determine whether to wrap code #} + # fmt: off @property - def {{method.transport_safe_name|snake_case}}(self) -> Callable[ - [{{method.input.ident}}], - {{method.output.ident}}]: + def {{method.transport_safe_name|snake_case}}( + self + ) -> Callable[[{{method.input.ident}}], {{method.output.ident}}]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
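The `# fmt: off` / `# fmt: on` pair introduced in this hunk is Black's documented escape hatch: everything between the markers is left untouched, which is why the templates use it where computing wrapped line lengths inside Jinja would be awkward. A small illustration; the field names below are hypothetical, and the hand alignment survives a Black run only because of the markers:

    # fmt: off
    REQUIRED_FIELDS_DEFAULT_VALUES = {
        "parent"   : "",    # hand-aligned on purpose;
        "pageSize" : 0,     # Black would normally collapse
        "readMask" : {},    # this spacing without the markers
    }
    # fmt: on

    def unset_required_fields(message_dict: dict) -> dict:
        """Return defaults for required params missing from the request,
        loosely mirroring _get_unset_required_fields in the base transport."""
        return {
            k: v for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
            if k not in message_dict
        }

    print(unset_required_fields({"parent": "projects/p1"}))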
# In C++ this would require a dynamic_cast - return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore - + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + # fmt: on {% endfor %} {% include '%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2' %} @@ -325,7 +328,5 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): self._session.close() -__all__=( - '{{ service.name }}RestTransport', -) +__all__ = ("{{ service.name }}RestTransport",) {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 1d6ec87374..bcb453d6ea 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -7,15 +7,18 @@ import google.auth + try: - import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore - from google.api_core import rest_streaming_async # type: ignore - from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore {# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2200): Add coverage for ImportError. #} except ImportError as e: # pragma: NO COVER - raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install {{ api.naming.warehouse_package_name }}[async_rest]`") from e + raise ImportError( + "`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install {{ api.naming.warehouse_package_name }}[async_rest]`" + ) from e from google.auth.aio import credentials as ga_credentials_async # type: ignore @@ -29,7 +32,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} from google.api_core import retry_async as retries from google.api_core import rest_helpers @@ -45,7 +48,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} import json # type: ignore @@ -63,6 +66,7 @@ import logging try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -91,6 +95,7 @@ class Async{{service.name}}RestStub: _host: str _interceptor: Async{{service.name}}RestInterceptor + class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): """Asynchronous REST backend transport for {{ service.name }}. @@ -102,25 +107,27 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, - *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials_async.Credentials] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - url_scheme: str = 'https', - interceptor: Optional[Async{{ service.name }}RestInterceptor] = None, - ) -> None: + + def __init__( + self, + *, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = "https", + interceptor: Optional[Async{{ service.name }}RestInterceptor] = None, + ) -> None: """Instantiate the transport. - {% if not opts.rest_numeric_enums %} - NOTE: This async REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - {% endif %} + {% if not opts.rest_numeric_enums %} + NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + {% endif %} Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -142,7 +149,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): client_info=client_info, always_use_jwt_access=False, url_scheme=url_scheme, - api_audience=None + api_audience=None, ) {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once @@ -172,13 +179,15 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {{ shared_macros.response_method(body_spec, is_async=True, is_streaming_method=None)|indent(8) }} {% endif %}{# method.http_options and not method.client_streaming and not method.paged_result_field #} - async def __call__(self, - request: {{method.input.ident}}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - {{ shared_macros.client_method_metadata_argument()|indent(12) }}={{ shared_macros.client_method_metadata_default_value() }}, - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Update return type for client streaming method. #} - ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming_async.AsyncResponseIterator{% endif %}{% endif %}: + async def __call__( + self, + request: {{method.input.ident}}, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + {{ shared_macros.client_method_metadata_argument()|indent(12) }} = {{ shared_macros.client_method_metadata_default_value() }}, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Update return type for client streaming method. #} + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming_async.AsyncResponseIterator{% endif %}{% endif %}: {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. #} {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} @@ -188,7 +197,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): Args: request (~.{{ method.input.ident }}): - The request object.{{ ' ' }} + The request object.{{ " " }} {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. @@ -236,16 +245,16 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): response_payload = None {% endif %}{# if not method.server_streaming #} http_response = { - {# Not logging response payload for server streaming here. See comment above. #} - {% if not method.server_streaming %} + {# Not logging response payload for server streaming here. See comment above. 
#} + {% if not method.server_streaming %} "payload": response_payload, - {% endif %}{# if not method.server_streaming #} - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + {% endif %}{# if not method.server_streaming #} + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ method.transport_safe_name|snake_case }}", - extra = { + extra={ "serviceName": "{{ service.meta.address.proto }}", "rpcName": "{{ method.name }}", "metadata": http_response["headers"], @@ -279,13 +288,13 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): http_options: Dict[str, List[Dict[str, str]]] = { {% for selector, rules in api.http_options.items() %} {% if selector.startswith('google.longrunning.Operations') %} - '{{ selector }}': [ + "{{ selector }}": [ {% for rule in rules %} { - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', + "method": "{{ rule.method }}", + "uri": "{{ rule.uri }}", {% if rule.body %} - 'body': '{{ rule.body }}', + "body": "{{ rule.body }}", {% endif %}{# rule.body #} }, {% endfor %}{# rules #} @@ -295,15 +304,15 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): } rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore - host=self._host, - # use the credentials which are saved - {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once - # we update the type hints for credentials to include asynchronous credentials in the client layer. - #} - credentials=self._credentials, # type: ignore - http_options=http_options, - path_prefix="{{ service.client_package_version }}" + host=self._host, + # use the credentials which are saved + {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once + # we update the type hints for credentials to include asynchronous credentials in the client layer. + #} + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="{{ service.client_package_version }}", ) self._operations_client = AsyncOperationsRestClient(transport=rest_transport) @@ -315,9 +324,9 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% for method in service.methods.values()|sort(attribute="name") %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2154): Remove `type: ignore`. 
#} @property - def {{method.transport_safe_name|snake_case}}(self) -> Callable[ - [{{method.input.ident}}], - {{method.output.ident}}]: + def {{method.transport_safe_name|snake_case}}( + self, + ) -> Callable[[{{method.input.ident}}], {{method.output.ident}}]: return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore {% endfor %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 index b79785afc5..83b34f5a54 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 @@ -28,7 +28,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% if api.has_location_mixin %} -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore {% endif %} from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO @@ -53,18 +53,20 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): {# TODO: handle mtls stuff if that is relevant for rest transport #} - def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str{% if service.host %} = "{{ service.host }}"{% endif %}, + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {{ " " }}The hostname to connect to {% if service.host %}(default: "{{ service.host }}"){% endif %}. {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2173): Type hint for credentials is # set to `Any` to support async and sync credential types in the parent rest transport classes. 
# However, we should have a stronger type here such as an abstract base credentials @@ -100,7 +102,7 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) {% for method in service.methods.values()|sort(attribute="name") %} @@ -110,11 +112,14 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): {% if method.http_options and not method.client_streaming %} {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + {# Omit formatting for this code block to avoid keeping track of whether this dict is empty #} + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { {% for req_field in method.input.required_fields if req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + "{{ req_field.name | camel_case }}": {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %}, {% endfor %} } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -136,26 +141,28 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): return transcoded_request {% set body_spec = method.http_options[0].body %} - {%- if body_spec %} + {% if body_spec %} @staticmethod def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums={{ opts.rest_numeric_enums }} + transcoded_request["body"], + use_integers_for_enums={{ opts.rest_numeric_enums }}, ) return body - {%- endif %}{# body_spec #} + {% endif %}{# body_spec #} @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums={{ opts.rest_numeric_enums }}, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums={{ opts.rest_numeric_enums }}, + ) + ) {% if method.input.required_fields %} query_params.update(_Base{{ service.name }}RestTransport._Base{{method.name}}._get_unset_required_fields(query_params)) {% endif %}{# required fields #} @@ -171,7 +178,6 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): {% include '%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2' %} -__all__=( - '_Base{{ service.name }}RestTransport', -) +__all__ = ("_Base{{ service.name }}RestTransport",) + {% endblock %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index 01f5291293..efbd88e24d 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -18,20 +18,23 @@ import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endfilter %} +{# Omit formatting for this code block to avoid keeping track of whether the manifest is empty #} +# fmt: off __protobuf__ = {{ 
p }}.module( - package='{{ '.'.join(proto.meta.address.package) }}', + package="{{ '.'.join(proto.meta.address.package) }}", {% if api.naming.proto_package != '.'.join(proto.meta.address.package) %} - marshal='{{ api.naming.proto_package }}', + marshal="{{ api.naming.proto_package }}", {% endif %} manifest={ {% for enum in proto.enums.values() %} - '{{ enum.name }}', + "{{ enum.name }}", {% endfor %} {% for message in proto.messages.values() %} - '{{ message.name }}', + "{{ message.name }}", {% endfor %} }, ) +# fmt: on {% for enum in proto.enums.values() %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index bec74ebf90..69cd42e6cf 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -16,10 +16,10 @@ from .{{proto.module_name }} import ( __all__ = ( {% for _, proto in api.protos|dictsort if proto.file_to_generate %} {% for _, message in proto.messages|dictsort %} - '{{ message.name }}', + "{{ message.name }}", {% endfor %} {% for _, enum in proto.enums|dictsort %} - '{{ enum.name }}', + "{{ enum.name }}", {% endfor %} {% endfor %} ) diff --git a/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 714b9eead1..4fb78bb4a4 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -1,6 +1,6 @@ class {{ message.name }}({{ p }}.Message): - r"""{{ message.meta.doc|rst(indent=4) }} {% if message.fields|length %} + r"""{{ message.meta.doc|rst(indent=4) }} {# Only include note if a oneof has more than one member field. #} {% if message.oneof_fields() %} {% if message.oneof_fields().values() | map('length') | max > 1 %} @@ -25,8 +25,11 @@ class {{ message.name }}({{ p }}.Message): This field is a member of `oneof`_ ``{{ field.oneof }}``. {% endif %} {% endfor %} - {% endif %} """ + {% else %} + r"""{{ message.meta.doc|rst(indent=4) }}""" + {% endif %} + {# Iterate over nested enums. -#} {% for enum in message.nested_enums.values() %} {% filter indent(first=True) %} @@ -52,7 +55,7 @@ class {{ message.name }}({{ p }}.Message): {# Iterate over fields. 
-#} {% for field in message.fields.values() %} {% if field.map %} - {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] %} + {% with key_field = field.message.fields["key"], value_field = field.message.fields["value"] %} {{ field.name }}: MutableMapping[{{ key_field.type.ident.rel(message.ident) }}, {{ value_field.type.ident.rel(message.ident) }}] = {{ p }}.MapField( {{ p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, @@ -69,7 +72,7 @@ class {{ message.name }}({{ p }}.Message): {% if field.proto3_optional %} optional=True, {% elif field.oneof %} - oneof='{{ field.oneof }}', + oneof="{{ field.oneof }}", {% endif %} {% if field.enum or field.message %} {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, diff --git a/gapic/templates/examples/feature_fragments.j2 b/gapic/templates/examples/feature_fragments.j2 index 66815cbe8f..0781f10c68 100644 --- a/gapic/templates/examples/feature_fragments.j2 +++ b/gapic/templates/examples/feature_fragments.j2 @@ -186,7 +186,7 @@ configs the client streaming logic should be modified to allow 2+ request object calling_form_enum.RequestStreamingClient] %} # This method expects an iterator which contains -# '{{module_name}}.{{ request_type.ident.name }}' objects +# "{{module_name}}.{{ request_type.ident.name }}" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/gapic/templates/noxfile.py.j2 b/gapic/templates/noxfile.py.j2 index c980027f82..0071be8d10 100644 --- a/gapic/templates/noxfile.py.j2 +++ b/gapic/templates/noxfile.py.j2 @@ -16,10 +16,10 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" {% if api.naming.module_namespace %} -FORMAT_PATHS = ["{{ api.naming.module_namespace[0] }}", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "{{ api.naming.module_namespace[0] }}", "tests", "noxfile.py", "setup.py"] {% else %} -FORMAT_PATHS = ["{{ api.naming.versioned_module_name }}", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "tests", "noxfile.py", "setup.py"] {% endif %} diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index f15326d670..52c57ad77d 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -425,7 +425,7 @@ def test_{{ method_name }}_field_headers(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }} + {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim('"') }} {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] @@ -472,7 +472,7 @@ async def test_{{ method_name }}_field_headers_async(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }} + {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim('"') }} {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] diff --git a/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/tests/integration/goldens/asset/google/cloud/asset/__init__.py index fd9404f4b6..ada590b590 
100755 --- a/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -99,83 +99,84 @@ from google.cloud.asset_v1.types.assets import TimeWindow from google.cloud.asset_v1.types.assets import VersionedResource -__all__ = ('AssetServiceClient', - 'AssetServiceAsyncClient', - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', +__all__ = ( + "AssetServiceClient", + "AssetServiceAsyncClient", + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "ListAssetsRequest", + "ListAssetsResponse", + 
"ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "ContentType", + "Asset", + "AttachedResource", + "ConditionEvaluation", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "TemporalAsset", + "TimeWindow", + "VersionedResource", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 31068ac472..a68d6cb717 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -109,10 +109,10 @@ from .types.assets import TimeWindow from .types.assets import VersionedResource -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.asset_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.asset_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: @@ -122,20 +122,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.cloud.asset_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. 
Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -173,104 +177,108 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'AssetServiceAsyncClient', -'AnalyzeIamPolicyLongrunningMetadata', -'AnalyzeIamPolicyLongrunningRequest', -'AnalyzeIamPolicyLongrunningResponse', -'AnalyzeIamPolicyRequest', -'AnalyzeIamPolicyResponse', -'AnalyzeMoveRequest', -'AnalyzeMoveResponse', -'AnalyzeOrgPoliciesRequest', -'AnalyzeOrgPoliciesResponse', -'AnalyzeOrgPolicyGovernedAssetsRequest', -'AnalyzeOrgPolicyGovernedAssetsResponse', -'AnalyzeOrgPolicyGovernedContainersRequest', -'AnalyzeOrgPolicyGovernedContainersResponse', -'AnalyzerOrgPolicy', -'AnalyzerOrgPolicyConstraint', -'Asset', -'AssetServiceClient', -'AttachedResource', -'BatchGetAssetsHistoryRequest', -'BatchGetAssetsHistoryResponse', -'BatchGetEffectiveIamPoliciesRequest', -'BatchGetEffectiveIamPoliciesResponse', -'BigQueryDestination', -'ConditionEvaluation', -'ContentType', -'CreateFeedRequest', -'CreateSavedQueryRequest', -'DeleteFeedRequest', -'DeleteSavedQueryRequest', -'ExportAssetsRequest', -'ExportAssetsResponse', -'Feed', -'FeedOutputConfig', -'GcsDestination', -'GcsOutputResult', -'GetFeedRequest', -'GetSavedQueryRequest', -'IamPolicyAnalysisOutputConfig', -'IamPolicyAnalysisQuery', -'IamPolicyAnalysisResult', -'IamPolicyAnalysisState', -'IamPolicySearchResult', -'ListAssetsRequest', -'ListAssetsResponse', -'ListFeedsRequest', -'ListFeedsResponse', -'ListSavedQueriesRequest', -'ListSavedQueriesResponse', -'MoveAnalysis', -'MoveAnalysisResult', -'MoveImpact', -'OutputConfig', -'OutputResult', -'PartitionSpec', -'PubsubDestination', -'QueryAssetsOutputConfig', -'QueryAssetsRequest', -'QueryAssetsResponse', -'QueryResult', -'RelatedAsset', -'RelatedAssets', -'RelatedResource', -'RelatedResources', -'RelationshipAttributes', -'Resource', -'ResourceSearchResult', -'SavedQuery', -'SearchAllIamPoliciesRequest', -'SearchAllIamPoliciesResponse', -'SearchAllResourcesRequest', -'SearchAllResourcesResponse', -'TableFieldSchema', -'TableSchema', -'TemporalAsset', -'TimeWindow', -'UpdateFeedRequest', -'UpdateSavedQueryRequest', -'VersionedResource', + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "Asset", + "AssetServiceAsyncClient", + "AssetServiceClient", + "AttachedResource", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "ConditionEvaluation", + "ContentType", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + "ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + 
"ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "TemporalAsset", + "TimeWindow", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "VersionedResource", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index b35796f582..7822264387 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -17,6 +17,6 @@ from .async_client import AssetServiceAsyncClient __all__ = ( - 'AssetServiceClient', - 'AssetServiceAsyncClient', + "AssetServiceClient", + "AssetServiceAsyncClient", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 38379ce9d7..864c7cf27b 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.asset_v1 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -39,7 +50,7 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -50,12 +61,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class AssetServiceAsyncClient: """Asset service definition.""" @@ -190,12 +203,14 @@ def universe_domain(self) -> str: get_transport_class = AssetServiceClient.get_transport_class - def 
__init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service async client. Args: @@ -253,30 +268,32 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - async def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -359,11 +376,13 @@ async def sample_export_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -387,14 +406,15 @@ async def sample_export_assets(): # Done; return the response. 
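The routing-header block above (now wrapped in fmt markers) turns selected request fields into the `x-goog-request-params` metadata entry. `gapic_v1.routing_header.to_grpc_metadata` is the real helper; the sketch below only approximates its observable output and is not its implementation:

    from urllib.parse import quote

    def to_grpc_metadata(pairs):
        # Approximation: percent-encode each value and join as k=v&k=v,
        # yielding the single metadata tuple the generated clients append.
        value = "&".join(f"{key}={quote(str(val), safe='')}" for key, val in pairs)
        return ("x-goog-request-params", value)

    metadata = () + (to_grpc_metadata((("parent", "projects/my-proj"),)),)
    print(metadata)  # (('x-goog-request-params', 'parent=projects%2Fmy-proj'),)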
return response - async def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsAsyncPager: + async def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -463,8 +483,7 @@ async def sample_list_assets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -482,11 +501,13 @@ async def sample_list_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -513,13 +534,14 @@ async def sample_list_assets(): # Done; return the response. return response - async def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + async def batch_get_assets_history( + self, + request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -581,11 +603,13 @@ async def sample_batch_get_assets_history(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -601,14 +625,15 @@ async def sample_batch_get_assets_history(): # Done; return the response. 
return response - async def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -687,8 +712,7 @@ async def sample_create_feed(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -706,11 +730,13 @@ async def sample_create_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -726,14 +752,15 @@ async def sample_create_feed(): # Done; return the response. return response - async def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -800,8 +827,7 @@ async def sample_get_feed(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -819,11 +845,13 @@ async def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -839,14 +867,15 @@ async def sample_get_feed(): # Done; return the response. 
return response - async def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + async def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -908,8 +937,7 @@ async def sample_list_feeds(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -927,11 +955,13 @@ async def sample_list_feeds(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -947,14 +977,15 @@ async def sample_list_feeds(): # Done; return the response. return response - async def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + async def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1025,8 +1056,7 @@ async def sample_update_feed(): flattened_params = [feed] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1044,11 +1074,13 @@ async def sample_update_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("feed.name", request.feed.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -1064,14 +1096,15 @@ async def sample_update_feed(): # Done; return the response. return response - async def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1123,8 +1156,7 @@ async def sample_delete_feed(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1142,11 +1174,13 @@ async def sample_delete_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1159,16 +1193,17 @@ async def sample_delete_feed(): metadata=metadata, ) - async def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesAsyncPager: + async def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. 
The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1338,8 +1373,7 @@ async def sample_search_all_resources(): flattened_params = [scope, query, asset_types] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1361,11 +1395,13 @@ async def sample_search_all_resources(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1392,15 +1428,16 @@ async def sample_search_all_resources(): # Done; return the response. return response - async def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesAsyncPager: + async def search_all_iam_policies( + self, + request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1532,8 +1569,7 @@ async def sample_search_all_iam_policies(): flattened_params = [scope, query] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1553,11 +1589,13 @@ async def sample_search_all_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1584,13 +1622,14 @@ async def sample_search_all_iam_policies(): # Done; return the response. 
return response - async def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + async def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1653,11 +1692,13 @@ async def sample_analyze_iam_policy(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("analysis_query.scope", request.analysis_query.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1673,13 +1714,14 @@ async def sample_analyze_iam_policy(): # Done; return the response. return response - async def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def analyze_iam_policy_longrunning( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -1762,11 +1804,13 @@ async def sample_analyze_iam_policy_longrunning(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("analysis_query.scope", request.analysis_query.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1790,13 +1834,14 @@ async def sample_analyze_iam_policy_longrunning(): # Done; return the response. return response - async def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + async def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. 
The analysis is best effort depending on the user's permissions of @@ -1862,11 +1907,13 @@ async def sample_analyze_move(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("resource", request.resource), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1882,13 +1929,14 @@ async def sample_analyze_move(): # Done; return the response. return response - async def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + async def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. @@ -1960,11 +2008,13 @@ async def sample_query_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1980,16 +2030,17 @@ async def sample_query_assets(): # Done; return the response. return response - async def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2077,8 +2128,7 @@ async def sample_create_saved_query(): flattened_params = [parent, saved_query, saved_query_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2100,11 +2150,13 @@ async def sample_create_saved_query(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2120,14 +2172,15 @@ async def sample_create_saved_query(): # Done; return the response. return response - async def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. code-block:: python @@ -2190,8 +2243,7 @@ async def sample_get_saved_query(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2209,11 +2261,13 @@ async def sample_get_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2229,14 +2283,15 @@ async def sample_get_saved_query(): # Done; return the response. return response - async def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesAsyncPager: + async def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesAsyncPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2305,8 +2360,7 @@ async def sample_list_saved_queries(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
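Aside on the pattern these hunks keep applying: the `# fmt: off` / `# fmt: on` guards around the routing-header construction exist so that a formatter run over the generated sources (black-style, which this patch's `+` lines follow) does not collapse the nested tuple-of-tuples passed to `gapic_v1.routing_header.to_grpc_metadata` onto one line. A minimal, self-contained sketch of the guarded pattern follows; the helper name `_with_routing_header` and the standalone-function framing are illustrative only, not part of the generated clients:

from typing import Sequence, Tuple

from google.api_core import gapic_v1


def _with_routing_header(
    metadata: Sequence[Tuple[str, str]],
    parent: str,
) -> Tuple[Tuple[str, str], ...]:
    """Appends an x-goog-request-params routing header to call metadata."""
    # fmt: off
    # The guard keeps ("parent", parent) on its own line so a formatter
    # cannot flatten the tuple-of-tuples argument.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", parent),
        )),
    )
    # fmt: on
    return metadata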
@@ -2324,11 +2378,13 @@ async def sample_list_saved_queries(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2355,15 +2411,16 @@ async def sample_list_saved_queries(): # Done; return the response. return response - async def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + async def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2434,8 +2491,7 @@ async def sample_update_saved_query(): flattened_params = [saved_query, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2455,11 +2511,13 @@ async def sample_update_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("saved_query.name", request.saved_query.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2475,14 +2533,15 @@ async def sample_update_saved_query(): # Done; return the response. return response - async def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. 
code-block:: python @@ -2536,8 +2595,7 @@ async def sample_delete_saved_query(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2555,11 +2613,13 @@ async def sample_delete_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2572,13 +2632,14 @@ async def sample_delete_saved_query(): metadata=metadata, ) - async def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + async def batch_get_effective_iam_policies( + self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -2638,11 +2699,13 @@ async def sample_batch_get_effective_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2658,16 +2721,17 @@ async def sample_batch_get_effective_iam_policies(): # Done; return the response. return response - async def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesAsyncPager: + async def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesAsyncPager: r"""Analyzes organization policies under a scope. .. 
code-block:: python @@ -2759,8 +2823,7 @@ async def sample_analyze_org_policies(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2782,11 +2845,13 @@ async def sample_analyze_org_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2813,16 +2878,17 @@ async def sample_analyze_org_policies(): # Done; return the response. return response - async def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + async def analyze_org_policy_governed_containers( + self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -2914,8 +2980,7 @@ async def sample_analyze_org_policy_governed_containers(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2937,11 +3002,13 @@ async def sample_analyze_org_policy_governed_containers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2968,16 +3035,17 @@ async def sample_analyze_org_policy_governed_containers(): # Done; return the response. 
return response - async def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + async def analyze_org_policy_governed_assets( + self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: @@ -3098,8 +3166,7 @@ async def sample_analyze_org_policy_governed_assets(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3121,11 +3188,13 @@ async def sample_analyze_org_policy_governed_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3189,17 +3258,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -3210,12 +3280,11 @@ async def __aenter__(self) -> "AssetServiceAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceAsyncClient", -) +__all__ = ("AssetServiceAsyncClient",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 843790205a..610ffb03c0 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.asset_v1 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -53,7 +66,7 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -71,14 +84,16 @@ class AssetServiceClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] _transport_registry["grpc"] = AssetServiceGrpcTransport _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport _transport_registry["rest"] = AssetServiceRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[AssetServiceTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AssetServiceTransport]: """Returns an appropriate transport class. Args: @@ -114,9 +129,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -125,16 +138,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "cloudasset.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -148,21 +160,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -197,7 +207,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): AssetServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -214,23 +225,37 @@ def transport(self) -> AssetServiceTransport: return self._transport @staticmethod - def access_level_path(access_policy: str,access_level: str,) -> str: + def access_level_path( + access_policy: str, + access_level: str, + ) -> str: """Returns a fully-qualified access_level string.""" - return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + return "accessPolicies/{access_policy}/accessLevels/{access_level}".format( + access_policy=access_policy, + access_level=access_level, + ) @staticmethod - def parse_access_level_path(path: str) -> Dict[str,str]: + def parse_access_level_path( + path: str, + ) -> Dict[str, str]: """Parses a access_level path into its component segments.""" m = re.match(r"^accessPolicies/(?P.+?)/accessLevels/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def access_policy_path(access_policy: str,) -> str: + def access_policy_path( + access_policy: str, + ) -> str: """Returns a fully-qualified access_policy string.""" - return "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + return "accessPolicies/{access_policy}".format( + access_policy=access_policy, + ) @staticmethod - def parse_access_policy_path(path: str) -> Dict[str,str]: + def parse_access_policy_path( + path: str, + ) -> Dict[str, str]: """Parses a access_policy path into its component segments.""" m = re.match(r"^accessPolicies/(?P.+?)$", path) return m.groupdict() if m else {} @@ -241,112 +266,183 @@ def asset_path() -> str: return "*".format() @staticmethod - def parse_asset_path(path: str) -> Dict[str,str]: + def parse_asset_path( + path: str, + ) -> Dict[str, str]: """Parses a asset path into its component segments.""" m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod - def feed_path(project: str,feed: str,) -> str: + def feed_path( + project: str, + feed: str, + ) -> str: """Returns a fully-qualified feed string.""" - return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + return "projects/{project}/feeds/{feed}".format( + project=project, + feed=feed, + ) @staticmethod - def parse_feed_path(path: str) -> Dict[str,str]: + def parse_feed_path( + path: str, + ) -> Dict[str, str]: """Parses a feed path into its component segments.""" m = re.match(r"^projects/(?P.+?)/feeds/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def inventory_path(project: str,location: str,instance: str,) -> str: + def inventory_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified inventory string.""" - return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}/inventory".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_inventory_path(path: str) -> Dict[str,str]: + def parse_inventory_path( + path: str, + ) -> Dict[str, str]: """Parses a inventory path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/inventory$", path) return m.groupdict() if m else {} @staticmethod - def saved_query_path(project: str,saved_query: str,) -> str: + def saved_query_path( + project: 
str, + saved_query: str, + ) -> str: """Returns a fully-qualified saved_query string.""" - return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + return "projects/{project}/savedQueries/{saved_query}".format( + project=project, + saved_query=saved_query, + ) @staticmethod - def parse_saved_query_path(path: str) -> Dict[str,str]: + def parse_saved_query_path( + path: str, + ) -> Dict[str, str]: """Parses a saved_query path into its component segments.""" m = re.match(r"^projects/(?P.+?)/savedQueries/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str: + def service_perimeter_path( + access_policy: str, + service_perimeter: str, + ) -> str: """Returns a fully-qualified service_perimeter string.""" - return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format( + access_policy=access_policy, + service_perimeter=service_perimeter, + ) @staticmethod - def parse_service_perimeter_path(path: str) -> Dict[str,str]: + def parse_service_perimeter_path( + path: str, + ) -> Dict[str, str]: """Parses a service_perimeter path into its component segments.""" m = re.match(r"^accessPolicies/(?P.+?)/servicePerimeters/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return 
"projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -378,8 +474,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = AssetServiceClient._use_client_cert_effective() @@ -509,7 +607,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -549,12 +647,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the asset service client. 
Args: @@ -614,12 +714,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -639,22 +739,22 @@ def __init__(self, *, if transport_provided: # transport is a AssetServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(AssetServiceTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - AssetServiceClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or AssetServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -684,24 +784,27 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceClient`.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.asset.v1.AssetService", "credentialsType": None, - } + }, ) - def export_assets(self, - request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def export_assets( + self, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Exports assets with time and resource types to a 
given Cloud Storage location/BigQuery table. For Cloud Storage location destinations, the output format is newline-delimited JSON. Each @@ -784,11 +887,13 @@ def sample_export_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -812,14 +917,15 @@ def sample_export_assets(): # Done; return the response. return response - def list_assets(self, - request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListAssetsPager: + def list_assets( + self, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -888,8 +994,7 @@ def sample_list_assets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -906,11 +1011,13 @@ def sample_list_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -937,13 +1044,14 @@ def sample_list_assets(): # Done; return the response. return response - def batch_get_assets_history(self, - request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def batch_get_assets_history( + self, + request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when the asset and its attached IAM POLICY both exist. This can @@ -1005,11 +1113,13 @@ def sample_batch_get_assets_history(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
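# A hedged aside on the `# fmt: off` / `# fmt: on` pair added above: these
# directives make a formatter such as Black leave the enclosed region exactly
# as emitted, so the deliberately nested routing-header tuple keeps its shape.
# A minimal sketch of the guarded pattern (names taken from the hunk above):
#
#     # fmt: off
#     metadata = tuple(metadata) + (
#         gapic_v1.routing_header.to_grpc_metadata((
#             ("parent", request.parent),
#         )),
#     )
#     # fmt: on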
self._validate_universe_domain() @@ -1025,14 +1135,15 @@ def sample_batch_get_assets_history(): # Done; return the response. return response - def create_feed(self, - request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def create_feed( + self, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset updates. @@ -1111,8 +1222,7 @@ def sample_create_feed(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1129,11 +1239,13 @@ def sample_create_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1149,14 +1261,15 @@ def sample_create_feed(): # Done; return the response. return response - def get_feed(self, - request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def get_feed( + self, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Gets details about an asset feed. .. code-block:: python @@ -1223,8 +1336,7 @@ def sample_get_feed(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1241,11 +1353,13 @@ def sample_get_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
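# A short hedged note: gapic_v1.routing_header.to_grpc_metadata collapses the
# field/value pairs into the single "x-goog-request-params" metadata entry the
# backend routes on; roughly, ("name", request.name) becomes
# ("x-goog-request-params", "name=<url-encoded value>").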
self._validate_universe_domain() @@ -1261,14 +1375,15 @@ def sample_get_feed(): # Done; return the response. return response - def list_feeds(self, - request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.ListFeedsResponse: + def list_feeds( + self, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -1330,8 +1445,7 @@ def sample_list_feeds(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1348,11 +1462,13 @@ def sample_list_feeds(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1368,14 +1484,15 @@ def sample_list_feeds(): # Done; return the response. return response - def update_feed(self, - request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, - *, - feed: Optional[asset_service.Feed] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.Feed: + def update_feed( + self, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, + *, + feed: Optional[asset_service.Feed] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Updates an asset feed configuration. .. code-block:: python @@ -1446,8 +1563,7 @@ def sample_update_feed(): flattened_params = [feed] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1464,11 +1580,13 @@ def sample_update_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("feed.name", request.feed.name), )), ) + # fmt: on # Validate the universe domain. 
self._validate_universe_domain() @@ -1484,14 +1602,15 @@ def sample_update_feed(): # Done; return the response. return response - def delete_feed(self, - request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_feed( + self, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an asset feed. .. code-block:: python @@ -1543,8 +1662,7 @@ def sample_delete_feed(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1561,11 +1679,13 @@ def sample_delete_feed(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1578,16 +1698,17 @@ def sample_delete_feed(): metadata=metadata, ) - def search_all_resources(self, - request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - asset_types: Optional[MutableSequence[str]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllResourcesPager: + def search_all_resources( + self, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllResourcesPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllResources`` permission @@ -1757,8 +1878,7 @@ def sample_search_all_resources(): flattened_params = [scope, query, asset_types] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
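# A usage sketch of the mutual exclusion enforced above (the client instance
# and resource names are assumed, not part of this patch): pass either a
# request object or flattened fields, never both.
#
#     client.search_all_resources(scope="projects/my-project", query="name:Important")
#     client.search_all_resources(request=asset_service.SearchAllResourcesRequest(scope="projects/my-project"))
#     # Supplying `request` together with a flattened field raises ValueError.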
@@ -1779,11 +1899,13 @@ def sample_search_all_resources(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1810,15 +1932,16 @@ def sample_search_all_resources(): # Done; return the response. return response - def search_all_iam_policies(self, - request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - query: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.SearchAllIamPoliciesPager: + def search_all_iam_policies( + self, + request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + query: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the ``cloudasset.assets.searchAllIamPolicies`` permission on the @@ -1950,8 +2073,7 @@ def sample_search_all_iam_policies(): flattened_params = [scope, query] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1970,11 +2092,13 @@ def sample_search_all_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2001,13 +2125,14 @@ def sample_search_all_iam_policies(): # Done; return the response. return response - def analyze_iam_policy(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeIamPolicyResponse: + def analyze_iam_policy( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -2070,11 +2195,13 @@ def sample_analyze_iam_policy(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("analysis_query.scope", request.analysis_query.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2090,13 +2217,14 @@ def sample_analyze_iam_policy(): # Done; return the response. return response - def analyze_iam_policy_longrunning(self, - request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def analyze_iam_policy_longrunning( + self, + request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis results to a Google Cloud Storage or a BigQuery destination. For @@ -2179,11 +2307,13 @@ def sample_analyze_iam_policy_longrunning(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("analysis_query.scope", request.analysis_query.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2207,13 +2337,14 @@ def sample_analyze_iam_policy_longrunning(): # Done; return the response. return response - def analyze_move(self, - request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.AnalyzeMoveResponse: + def analyze_move( + self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is best effort depending on the user's permissions of @@ -2279,11 +2410,13 @@ def sample_analyze_move(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("resource", request.resource), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2299,13 +2432,14 @@ def sample_analyze_move(): # Done; return the response. 
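# A hedged note on the long-running methods above (export_assets,
# analyze_iam_policy_longrunning): each returns a google.api_core
# operation.Operation wrapping the server-side job, and callers typically
# block on it; a minimal sketch with an assumed request object `req`:
#
#     op = client.analyze_iam_policy_longrunning(request=req)
#     result = op.result(timeout=300)  # blocks until the operation completes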
return response - def query_assets(self, - request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.QueryAssetsResponse: + def query_assets( + self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard SQL `__. @@ -2377,11 +2511,13 @@ def sample_query_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2397,16 +2533,17 @@ def sample_query_assets(): # Done; return the response. return response - def create_saved_query(self, - request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, - *, - parent: Optional[str] = None, - saved_query: Optional[asset_service.SavedQuery] = None, - saved_query_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def create_saved_query( + self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2494,8 +2631,7 @@ def sample_create_saved_query(): flattened_params = [parent, saved_query, saved_query_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2516,11 +2652,13 @@ def sample_create_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2536,14 +2674,15 @@ def sample_create_saved_query(): # Done; return the response. 
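# A tentative note on query_assets above (the details live in its docstring,
# not this hunk): the call issues a server-side job, so the response may carry
# rows inline or only a job reference to poll later; callers should not assume
# results are immediately present.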
return response - def get_saved_query(self, - request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def get_saved_query( + self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Gets details about a saved query. .. code-block:: python @@ -2606,8 +2745,7 @@ def sample_get_saved_query(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2624,11 +2762,13 @@ def sample_get_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2644,14 +2784,15 @@ def sample_get_saved_query(): # Done; return the response. return response - def list_saved_queries(self, - request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSavedQueriesPager: + def list_saved_queries( + self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSavedQueriesPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2720,8 +2861,7 @@ def sample_list_saved_queries(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2738,11 +2878,13 @@ def sample_list_saved_queries(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
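# A hedged usage sketch for the pager-returning methods (resource name
# assumed): the ListSavedQueriesPager returned below fetches further pages
# lazily as it is iterated, as the pagers.py hunks later in this patch show.
#
#     for saved_query in client.list_saved_queries(parent="projects/my-project"):
#         print(saved_query.name)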
self._validate_universe_domain() @@ -2769,15 +2911,16 @@ def sample_list_saved_queries(): # Done; return the response. return response - def update_saved_query(self, - request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, - *, - saved_query: Optional[asset_service.SavedQuery] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.SavedQuery: + def update_saved_query( + self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Updates a saved query. .. code-block:: python @@ -2848,8 +2991,7 @@ def sample_update_saved_query(): flattened_params = [saved_query, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2868,11 +3010,13 @@ def sample_update_saved_query(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("saved_query.name", request.saved_query.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2888,14 +3032,15 @@ def sample_update_saved_query(): # Done; return the response. return response - def delete_saved_query(self, - request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_saved_query( + self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a saved query. .. code-block:: python @@ -2949,8 +3094,7 @@ def sample_delete_saved_query(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2967,11 +3111,13 @@ def sample_delete_saved_query(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2984,13 +3130,14 @@ def sample_delete_saved_query(): metadata=metadata, ) - def batch_get_effective_iam_policies(self, - request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def batch_get_effective_iam_policies( + self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. .. code-block:: python @@ -3050,11 +3197,13 @@ def sample_batch_get_effective_iam_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3070,16 +3219,17 @@ def sample_batch_get_effective_iam_policies(): # Done; return the response. return response - def analyze_org_policies(self, - request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPoliciesPager: + def analyze_org_policies( + self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPoliciesPager: r"""Analyzes organization policies under a scope. .. code-block:: python @@ -3171,8 +3321,7 @@ def sample_analyze_org_policies(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3193,11 +3342,13 @@ def sample_analyze_org_policies(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3224,16 +3375,17 @@ def sample_analyze_org_policies(): # Done; return the response. 
return response - def analyze_org_policy_governed_containers(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: + def analyze_org_policy_governed_containers( + self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -3325,8 +3477,7 @@ def sample_analyze_org_policy_governed_containers(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3347,11 +3498,13 @@ def sample_analyze_org_policy_governed_containers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3378,16 +3531,17 @@ def sample_analyze_org_policy_governed_containers(): # Done; return the response. return response - def analyze_org_policy_governed_assets(self, - request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, - *, - scope: Optional[str] = None, - constraint: Optional[str] = None, - filter: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: + def analyze_org_policy_governed_assets( + self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. 
This RPC supports custom constraints and the following 10 canned constraints: @@ -3508,8 +3662,7 @@ def sample_analyze_org_policy_governed_assets(): flattened_params = [scope, constraint, filter] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3530,11 +3683,13 @@ def sample_analyze_org_policy_governed_assets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("scope", request.scope), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3611,10 +3766,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3622,7 +3774,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3631,18 +3787,9 @@ def get_operation( raise e - - - - - - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "AssetServiceClient", -) +__all__ = ("AssetServiceClient",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 5d14be97dd..7153bfa7fd 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -45,14 +46,17 @@ class ListAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.ListAssetsResponse], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListAssetsResponse], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -93,7 +97,7 @@ def __iter__(self) -> Iterator[assets.Asset]: yield from page.assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListAssetsAsyncPager: @@ -113,14 +117,17 @@ class ListAssetsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], - request: asset_service.ListAssetsRequest, - response: asset_service.ListAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -155,6 +162,7 @@ async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): async for page in self.pages: @@ -164,7 +172,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesPager: @@ -184,14 +192,17 @@ class SearchAllResourcesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.SearchAllResourcesResponse], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllResourcesResponse], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -232,7 +243,7 @@ def __iter__(self) -> Iterator[assets.ResourceSearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllResourcesAsyncPager: @@ -252,14 +263,17 @@ class SearchAllResourcesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], - request: asset_service.SearchAllResourcesRequest, - response: asset_service.SearchAllResourcesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]], + request: asset_service.SearchAllResourcesRequest, + response: asset_service.SearchAllResourcesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -294,6 +308,7 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse] self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): async for page in self.pages: @@ -303,7 +318,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesPager: @@ -323,14 +338,17 @@ class SearchAllIamPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.SearchAllIamPoliciesResponse], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.SearchAllIamPoliciesResponse], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -371,7 +389,7 @@ def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: yield from page.results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class SearchAllIamPoliciesAsyncPager: @@ -391,14 +409,17 @@ class SearchAllIamPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], - request: asset_service.SearchAllIamPoliciesRequest, - response: asset_service.SearchAllIamPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -433,6 +454,7 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesRespons self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): async for page in self.pages: @@ -442,7 +464,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesPager: @@ -462,14 +484,17 @@ class ListSavedQueriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.ListSavedQueriesResponse], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.ListSavedQueriesResponse], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -510,7 +535,7 @@ def __iter__(self) -> Iterator[asset_service.SavedQuery]: yield from page.saved_queries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSavedQueriesAsyncPager: @@ -530,14 +555,17 @@ class ListSavedQueriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], - request: asset_service.ListSavedQueriesRequest, - response: asset_service.ListSavedQueriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]], + request: asset_service.ListSavedQueriesRequest, + response: asset_service.ListSavedQueriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -572,6 +600,7 @@ async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: async def async_generator(): async for page in self.pages: @@ -581,7 +610,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesPager: @@ -601,14 +630,17 @@ class AnalyzeOrgPoliciesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -649,7 +681,7 @@ def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolic yield from page.org_policy_results def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPoliciesAsyncPager: @@ -669,14 +701,17 @@ class AnalyzeOrgPoliciesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], - request: asset_service.AnalyzeOrgPoliciesRequest, - response: asset_service.AnalyzeOrgPoliciesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]], + request: asset_service.AnalyzeOrgPoliciesRequest, + response: asset_service.AnalyzeOrgPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -711,6 +746,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse] self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: async def async_generator(): async for page in self.pages: @@ -720,7 +756,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedContainersPager: @@ -740,14 +776,17 @@ class AnalyzeOrgPolicyGovernedContainersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -788,7 +827,7 @@ def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersR yield from page.governed_containers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedContainersAsyncPager: @@ -808,14 +847,17 @@ class AnalyzeOrgPolicyGovernedContainersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]], + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -850,6 +892,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedCon self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: async def async_generator(): async for page in self.pages: @@ -859,7 +902,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsPager: @@ -879,14 +922,17 @@ class AnalyzeOrgPolicyGovernedAssetsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -927,7 +973,7 @@ def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsRespo yield from page.governed_assets def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class AnalyzeOrgPolicyGovernedAssetsAsyncPager: @@ -947,14 +993,17 @@ class AnalyzeOrgPolicyGovernedAssetsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]], + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -989,6 +1038,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAss self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: async def async_generator(): async for page in self.pages: @@ -998,4 +1048,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index be001a49d9..aebf04e535 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -25,14 +25,14 @@ # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] -_transport_registry['grpc'] = AssetServiceGrpcTransport -_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport -_transport_registry['rest'] = AssetServiceRestTransport +_transport_registry["grpc"] = AssetServiceGrpcTransport +_transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport +_transport_registry["rest"] = AssetServiceRestTransport __all__ = ( - 'AssetServiceTransport', - 'AssetServiceGrpcTransport', - 'AssetServiceGrpcAsyncIOTransport', - 'AssetServiceRestTransport', - 'AssetServiceRestInterceptor', + "AssetServiceTransport", + "AssetServiceGrpcTransport", + "AssetServiceGrpcAsyncIOTransport", + "AssetServiceRestTransport", + "AssetServiceRestInterceptor", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 4aebab9170..f9e1ab5b99 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -25,11 +25,11 @@ from google.api_core import retry as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -41,29 +41,32 @@ class AssetServiceTransport(abc.ABC): """Abstract transport class for AssetService.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', + "https://www.googleapis.com/auth/cloud-platform", ) + # fmt: on - DEFAULT_HOST: str = 'cloudasset.googleapis.com' + DEFAULT_HOST: str = "cloudasset.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). + The hostname to connect to (default: "cloudasset.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
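For context on the registry hunk above: the OrderedDict simply maps transport names to classes so that the generated client can resolve a transport="grpc" / "grpc_asyncio" / "rest" string argument. A tiny sketch of that lookup; _transport_registry is module-private, so this is for illustration only:

    from google.cloud.asset_v1.services.asset_service import transports

    # Resolve a concrete transport class by name, the way the generated
    # client does internally when given transport="grpc_asyncio".
    transport_cls = transports._transport_registry["grpc_asyncio"]
    assert transport_cls is transports.AssetServiceGrpcAsyncIOTransport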
These credentials identify the application to the service; if none @@ -99,10 +102,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -110,15 +113,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -317,12 +324,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ @@ -333,220 +340,332 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + # fmt: off @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def export_assets( + self, + ) -> Callable[ + [asset_service.ExportAssetsRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Union[ - asset_service.ListAssetsResponse, - Awaitable[asset_service.ListAssetsResponse] - ]]: + def list_assets( + self, + ) -> Callable[ + [asset_service.ListAssetsRequest], + Union[ + asset_service.ListAssetsResponse, + Awaitable[asset_service.ListAssetsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Union[ - asset_service.BatchGetAssetsHistoryResponse, - Awaitable[asset_service.BatchGetAssetsHistoryResponse] - ]]: + def batch_get_assets_history( + self, + ) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Union[ + asset_service.BatchGetAssetsHistoryResponse, + Awaitable[asset_service.BatchGetAssetsHistoryResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def create_feed( + self, + ) -> Callable[ + [asset_service.CreateFeedRequest], + Union[ + asset_service.Feed, + Awaitable[asset_service.Feed] + ], + ]: raise NotImplementedError() + 
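The __init__ body reformatted above encodes a fixed precedence for credentials. Condensed into a standalone sketch; this paraphrases the generated logic rather than reproducing it:

    import google.auth
    from google.oauth2 import service_account

    def resolve_credentials(credentials, credentials_file, scopes_kwargs,
                            quota_project_id, always_use_jwt_access):
        # 1. An explicit credentials file takes precedence.
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id,
            )
        # 2. Otherwise fall back to Application Default Credentials.
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id,
            )
        # 3. Service-account credentials may opt in to self-signed JWTs,
        #    skipping the token-exchange round trip.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)
        return credentials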
# fmt: on + # fmt: off @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def get_feed( + self, + ) -> Callable[ + [asset_service.GetFeedRequest], + Union[ + asset_service.Feed, + Awaitable[asset_service.Feed] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Union[ - asset_service.ListFeedsResponse, - Awaitable[asset_service.ListFeedsResponse] - ]]: + def list_feeds( + self, + ) -> Callable[ + [asset_service.ListFeedsRequest], + Union[ + asset_service.ListFeedsResponse, + Awaitable[asset_service.ListFeedsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Union[ - asset_service.Feed, - Awaitable[asset_service.Feed] - ]]: + def update_feed( + self, + ) -> Callable[ + [asset_service.UpdateFeedRequest], + Union[ + asset_service.Feed, + Awaitable[asset_service.Feed] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_feed( + self, + ) -> Callable[ + [asset_service.DeleteFeedRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Union[ - asset_service.SearchAllResourcesResponse, - Awaitable[asset_service.SearchAllResourcesResponse] - ]]: + def search_all_resources( + self, + ) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Union[ + asset_service.SearchAllResourcesResponse, + Awaitable[asset_service.SearchAllResourcesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Union[ - asset_service.SearchAllIamPoliciesResponse, - Awaitable[asset_service.SearchAllIamPoliciesResponse] - ]]: + def search_all_iam_policies( + self, + ) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Union[ + asset_service.SearchAllIamPoliciesResponse, + Awaitable[asset_service.SearchAllIamPoliciesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Union[ - asset_service.AnalyzeIamPolicyResponse, - Awaitable[asset_service.AnalyzeIamPolicyResponse] - ]]: + def analyze_iam_policy( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Union[ + asset_service.AnalyzeIamPolicyResponse, + Awaitable[asset_service.AnalyzeIamPolicyResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Union[ - asset_service.AnalyzeMoveResponse, - 
Awaitable[asset_service.AnalyzeMoveResponse] - ]]: + def analyze_move( + self, + ) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Union[ + asset_service.AnalyzeMoveResponse, + Awaitable[asset_service.AnalyzeMoveResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Union[ - asset_service.QueryAssetsResponse, - Awaitable[asset_service.QueryAssetsResponse] - ]]: + def query_assets( + self, + ) -> Callable[ + [asset_service.QueryAssetsRequest], + Union[ + asset_service.QueryAssetsResponse, + Awaitable[asset_service.QueryAssetsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def create_saved_query( + self, + ) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def get_saved_query( + self, + ) -> Callable[ + [asset_service.GetSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Union[ - asset_service.ListSavedQueriesResponse, - Awaitable[asset_service.ListSavedQueriesResponse] - ]]: + def list_saved_queries( + self, + ) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Union[ + asset_service.ListSavedQueriesResponse, + Awaitable[asset_service.ListSavedQueriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Union[ - asset_service.SavedQuery, - Awaitable[asset_service.SavedQuery] - ]]: + def update_saved_query( + self, + ) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_saved_query( + self, + ) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Union[ - asset_service.BatchGetEffectiveIamPoliciesResponse, - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] - ]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Union[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Union[ - asset_service.AnalyzeOrgPoliciesResponse, - Awaitable[asset_service.AnalyzeOrgPoliciesResponse] - ]]: + 
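Every property in this run is fenced with # fmt: off / # fmt: on so that black leaves the hand-wrapped Callable[..., Union[...]] annotations exactly as generated. The markers work on any region; a self-contained illustration with hypothetical names:

    # fmt: off
    RETRYABLE_CODES = {
        "UNAVAILABLE":        True,
        "DEADLINE_EXCEEDED":  True,
    }
    # fmt: on
    # Between the markers, black keeps the aligned layout verbatim; below
    # them it reformats normally, e.g. it would collapse this call to one line:
    enabled = sorted(
        RETRYABLE_CODES
    )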
def analyze_org_policies( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Union[ + asset_service.AnalyzeOrgPoliciesResponse, + Awaitable[asset_service.AnalyzeOrgPoliciesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedContainersResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] - ]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Union[ - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] - ]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] + ], + ]: raise NotImplementedError() + # fmt: on @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property @@ -554,6 +673,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'AssetServiceTransport', -) +__all__ = ("AssetServiceTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 1edf735194..84854df699 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -22,7 +22,7 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -32,12 +32,13 @@ import proto # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -57,10 +58,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if 
isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -68,7 +71,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -94,7 +97,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,28 +119,31 @@ class AssetServiceGrpcTransport(AssetServiceTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). + The hostname to connect to (default: "cloudasset.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
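The interceptor hunks above make two small fixes: metadata values arriving as bytes are decoded before logging, and the keyword is written extra= without spaces, per PEP 8. The same idea outside the generated class, as a minimal sketch:

    import logging

    _LOGGER = logging.getLogger(__name__)

    def log_grpc_request(method: str, metadata) -> None:
        # gRPC metadata values may be bytes; decode for JSON-friendly logs.
        decoded = {
            key: value.decode("utf-8") if isinstance(value, bytes) else value
            for key, value in metadata
        }
        # `extra` attaches the structured fields to the LogRecord.
        _LOGGER.debug(
            f"Sending request for {method}",
            extra={"rpcName": method, "metadata": decoded},
        )

    log_grpc_request(
        "/google.cloud.asset.v1.AssetService/ListAssets",
        [("x-goog-request-params", b"parent=projects/123")],
    )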
These credentials identify the application to the service; if none @@ -212,7 +218,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -221,7 +228,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -256,19 +264,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -304,13 +314,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -322,17 +331,15 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: r"""Return a callable for the export assets method over gRPC. Exports assets with time and resource types to a given Cloud @@ -359,18 +366,18 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
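Both mTLS branches reformatted above funnel into the same call: the client_cert_source callback yields a PEM-encoded (certificate chain, private key) pair that is handed to grpc.ssl_channel_credentials. Distilled, with the callback as a stand-in:

    import grpc

    def make_mtls_channel_credentials(client_cert_source) -> grpc.ChannelCredentials:
        # client_cert_source() returns PEM-encoded bytes:
        # (certificate_chain, private_key).
        cert, key = client_cert_source()
        return grpc.ssl_channel_credentials(
            certificate_chain=cert,
            private_key=key,
        )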
- if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -386,18 +393,18 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self, + ) -> Callable[[asset_service.BatchGetAssetsHistoryRequest], asset_service.BatchGetAssetsHistoryResponse]: r"""Return a callable for the batch get assets history method over gRPC. Batch gets the update history of assets that overlap a time @@ -418,18 +425,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -446,18 +453,18 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
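Each RPC property in these hunks uses the same lazy stub cache: build the unary-unary stub on first access, then serve it from self._stubs on every later access. The bare pattern, with a hypothetical service and the serializer callbacks omitted:

    class _SketchTransport:
        def __init__(self, channel):
            self._channel = channel
            self._stubs = {}

        @property
        def list_widgets(self):
            # Create the stub once; in the generated code, the request
            # serializer / response deserializer callbacks are passed here.
            if "list_widgets" not in self._stubs:
                self._stubs["list_widgets"] = self._channel.unary_unary(
                    "/example.v1.WidgetService/ListWidgets",
                )
            return self._stubs["list_widgets"]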
- if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed( + self, + ) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -472,18 +479,18 @@ def get_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -499,18 +506,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -525,18 +532,18 @@ def update_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -551,18 +558,18 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self, + ) -> Callable[[asset_service.SearchAllResourcesRequest], asset_service.SearchAllResourcesResponse]: r"""Return a callable for the search all resources method over gRPC. Searches all Google Cloud resources within the specified scope, @@ -580,18 +587,18 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self, + ) -> Callable[[asset_service.SearchAllIamPoliciesRequest], asset_service.SearchAllIamPoliciesResponse]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -609,18 +616,18 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self, + ) -> Callable[[asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -636,18 +643,18 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[[asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. Analyzes IAM policies asynchronously to answer which identities @@ -673,18 +680,18 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs["analyze_iam_policy_longrunning"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self, + ) -> Callable[[asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse]: r"""Return a callable for the analyze move method over gRPC. Analyze moving a resource to a specified destination @@ -705,18 +712,18 @@ def analyze_move(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self, + ) -> Callable[[asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -746,18 +753,18 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the create saved query method over gRPC. Creates a saved query in a parent @@ -773,18 +780,18 @@ def create_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -799,18 +806,18 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self, + ) -> Callable[[asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -826,18 +833,18 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: r"""Return a callable for the update saved query method over gRPC. Updates a saved query. @@ -852,18 +859,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -878,18 +885,18 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[[asset_service.BatchGetEffectiveIamPoliciesRequest], asset_service.BatchGetEffectiveIamPoliciesResponse]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -905,18 +912,18 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs["batch_get_effective_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self, + ) -> Callable[[asset_service.AnalyzeOrgPoliciesRequest], asset_service.AnalyzeOrgPoliciesResponse]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -931,18 +938,18 @@ def analyze_org_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedContainersRequest], asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: r"""Return a callable for the analyze org policy governed containers method over gRPC. 
@@ -959,18 +966,18 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs["analyze_org_policy_governed_containers"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1004,13 +1011,13 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs["analyze_org_policy_governed_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def close(self): self._logged_channel.close() @@ -1019,8 +1026,7 @@ def close(self): def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
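close() above shuts down the logged channel, and its docstring warns against closing a transport shared with other clients. The generated clients wrap this in the context-manager protocol, so a safe, illustrative usage is:

    from google.cloud import asset_v1

    # The client owns its transport here, so closing it on exit is safe.
    with asset_v1.AssetServiceClient() as client:
        response = client.list_feeds(request={"parent": "projects/my-project"})
        for feed in response.feeds:
            print(feed.name)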
# gRPC handles serialization and deserialization, so we just need @@ -1038,6 +1044,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'AssetServiceGrpcTransport', -) +__all__ = ("AssetServiceGrpcTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index a2b4aeb11a..433b9c5039 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -25,23 +25,24 @@ from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AssetServiceGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -61,10 +62,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -98,7 +101,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +128,15 @@ class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + 
**kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -162,29 +167,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). + The hostname to connect to (default: "cloudasset.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -259,7 +266,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -268,7 +276,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -328,17 +337,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. 
return self._operations_client @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - Awaitable[operations_pb2.Operation]]: + def export_assets( + self, + ) -> Callable[[asset_service.ExportAssetsRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the export assets method over gRPC. Exports assets with time and resource types to a given Cloud @@ -365,18 +372,18 @@ def export_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ExportAssets', + if "export_assets" not in self._stubs: + self._stubs["export_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ExportAssets", request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_assets'] + return self._stubs["export_assets"] @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - Awaitable[asset_service.ListAssetsResponse]]: + def list_assets( + self, + ) -> Callable[[asset_service.ListAssetsRequest], Awaitable[asset_service.ListAssetsResponse]]: r"""Return a callable for the list assets method over gRPC. Lists assets with time and resource types and returns @@ -392,18 +399,18 @@ def list_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListAssets', + if "list_assets" not in self._stubs: + self._stubs["list_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListAssets", request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, ) - return self._stubs['list_assets'] + return self._stubs["list_assets"] @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: + def batch_get_assets_history( + self, + ) -> Callable[[asset_service.BatchGetAssetsHistoryRequest], Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: r"""Return a callable for the batch get assets history method over gRPC. Batch gets the update history of assets that overlap a time @@ -424,18 +431,18 @@ def batch_get_assets_history(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
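Every stub property in this file follows the same lazy-caching idiom: check `self._stubs`, build the callable on first access, then serve the cached instance on every later access. A dependency-free sketch of that idiom (the names below are illustrative, not from this patch):

    from typing import Callable, Dict


    class LazyStubExample:
        """Minimal sketch of the dict-backed stub cache used by the transport."""

        def __init__(self) -> None:
            self._stubs: Dict[str, Callable[[str], str]] = {}

        @property
        def echo(self) -> Callable[[str], str]:
            # Create the callable once, on first access, then serve it from cache.
            if "echo" not in self._stubs:
                self._stubs["echo"] = lambda payload: f"echo: {payload}"
            return self._stubs["echo"]


    svc = LazyStubExample()
    assert svc.echo("hi") == "echo: hi"
    assert svc.echo is svc.echo  # same cached callable on every access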
- if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + if "batch_get_assets_history" not in self._stubs: + self._stubs["batch_get_assets_history"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, ) - return self._stubs['batch_get_assets_history'] + return self._stubs["batch_get_assets_history"] @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - Awaitable[asset_service.Feed]]: + def create_feed( + self, + ) -> Callable[[asset_service.CreateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the create feed method over gRPC. Creates a feed in a parent @@ -452,18 +459,18 @@ def create_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateFeed', + if "create_feed" not in self._stubs: + self._stubs["create_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateFeed", request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['create_feed'] + return self._stubs["create_feed"] @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - Awaitable[asset_service.Feed]]: + def get_feed( + self, + ) -> Callable[[asset_service.GetFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the get feed method over gRPC. Gets details about an asset feed. @@ -478,18 +485,18 @@ def get_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetFeed', + if "get_feed" not in self._stubs: + self._stubs["get_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetFeed", request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['get_feed'] + return self._stubs["get_feed"] @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - Awaitable[asset_service.ListFeedsResponse]]: + def list_feeds( + self, + ) -> Callable[[asset_service.ListFeedsRequest], Awaitable[asset_service.ListFeedsResponse]]: r"""Return a callable for the list feeds method over gRPC. Lists all asset feeds in a parent @@ -505,18 +512,18 @@ def list_feeds(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
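For orientation, this is roughly how one of these cached callables is exercised; the transport instance, event loop setup, and feed name are placeholders, not taken from this patch.

    from google.cloud.asset_v1.types import asset_service


    async def fetch_feed(transport) -> asset_service.Feed:
        # `transport.get_feed` resolves to the cached unary-unary multicallable
        # built above; calling it issues the RPC and awaiting yields the Feed.
        request = asset_service.GetFeedRequest(name="projects/123/feeds/my-feed")
        return await transport.get_feed(request)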
- if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListFeeds', + if "list_feeds" not in self._stubs: + self._stubs["list_feeds"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListFeeds", request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, ) - return self._stubs['list_feeds'] + return self._stubs["list_feeds"] @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - Awaitable[asset_service.Feed]]: + def update_feed( + self, + ) -> Callable[[asset_service.UpdateFeedRequest], Awaitable[asset_service.Feed]]: r"""Return a callable for the update feed method over gRPC. Updates an asset feed configuration. @@ -531,18 +538,18 @@ def update_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateFeed', + if "update_feed" not in self._stubs: + self._stubs["update_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateFeed", request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, ) - return self._stubs['update_feed'] + return self._stubs["update_feed"] @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - Awaitable[empty_pb2.Empty]]: + def delete_feed( + self, + ) -> Callable[[asset_service.DeleteFeedRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete feed method over gRPC. Deletes an asset feed. @@ -557,18 +564,18 @@ def delete_feed(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteFeed', + if "delete_feed" not in self._stubs: + self._stubs["delete_feed"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteFeed", request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_feed'] + return self._stubs["delete_feed"] @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - Awaitable[asset_service.SearchAllResourcesResponse]]: + def search_all_resources( + self, + ) -> Callable[[asset_service.SearchAllResourcesRequest], Awaitable[asset_service.SearchAllResourcesResponse]]: r"""Return a callable for the search all resources method over gRPC. Searches all Google Cloud resources within the specified scope, @@ -586,18 +593,18 @@ def search_all_resources(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
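Note the two serializer families being wired up here: proto-plus types (the asset_service requests and responses) expose classmethod-style serialize/deserialize hooks, while plain protobuf messages such as empty_pb2.Empty and operations_pb2.Operation use FromString. A quick round-trip illustrating the difference (the feed name is a placeholder):

    from google.cloud.asset_v1.types import asset_service
    from google.protobuf import empty_pb2

    # proto-plus hook pair, as registered for request/response types above.
    request = asset_service.DeleteFeedRequest(name="projects/123/feeds/my-feed")
    wire = asset_service.DeleteFeedRequest.serialize(request)
    assert asset_service.DeleteFeedRequest.deserialize(wire).name == request.name

    # Stock protobuf hook, as registered for Empty (and Operation) responses.
    empty = empty_pb2.Empty.FromString(empty_pb2.Empty().SerializeToString())
    assert empty == empty_pb2.Empty()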
- if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllResources', + if "search_all_resources" not in self._stubs: + self._stubs["search_all_resources"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllResources", request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, ) - return self._stubs['search_all_resources'] + return self._stubs["search_all_resources"] @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - Awaitable[asset_service.SearchAllIamPoliciesResponse]]: + def search_all_iam_policies( + self, + ) -> Callable[[asset_service.SearchAllIamPoliciesRequest], Awaitable[asset_service.SearchAllIamPoliciesResponse]]: r"""Return a callable for the search all iam policies method over gRPC. Searches all IAM policies within the specified scope, such as a @@ -615,18 +622,18 @@ def search_all_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + if "search_all_iam_policies" not in self._stubs: + self._stubs["search_all_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/SearchAllIamPolicies", request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, ) - return self._stubs['search_all_iam_policies'] + return self._stubs["search_all_iam_policies"] @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - Awaitable[asset_service.AnalyzeIamPolicyResponse]]: + def analyze_iam_policy( + self, + ) -> Callable[[asset_service.AnalyzeIamPolicyRequest], Awaitable[asset_service.AnalyzeIamPolicyResponse]]: r"""Return a callable for the analyze iam policy method over gRPC. Analyzes IAM policies to answer which identities have @@ -642,18 +649,18 @@ def analyze_iam_policy(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + if "analyze_iam_policy" not in self._stubs: + self._stubs["analyze_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy", request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, ) - return self._stubs['analyze_iam_policy'] + return self._stubs["analyze_iam_policy"] @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - Awaitable[operations_pb2.Operation]]: + def analyze_iam_policy_longrunning( + self, + ) -> Callable[[asset_service.AnalyzeIamPolicyLongrunningRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the analyze iam policy longrunning method over gRPC. 
Analyzes IAM policies asynchronously to answer which identities @@ -679,18 +686,18 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + if "analyze_iam_policy_longrunning" not in self._stubs: + self._stubs["analyze_iam_policy_longrunning"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning", request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['analyze_iam_policy_longrunning'] + return self._stubs["analyze_iam_policy_longrunning"] @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - Awaitable[asset_service.AnalyzeMoveResponse]]: + def analyze_move( + self, + ) -> Callable[[asset_service.AnalyzeMoveRequest], Awaitable[asset_service.AnalyzeMoveResponse]]: r"""Return a callable for the analyze move method over gRPC. Analyze moving a resource to a specified destination @@ -711,18 +718,18 @@ def analyze_move(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeMove', + if "analyze_move" not in self._stubs: + self._stubs["analyze_move"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeMove", request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, ) - return self._stubs['analyze_move'] + return self._stubs["analyze_move"] @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - Awaitable[asset_service.QueryAssetsResponse]]: + def query_assets( + self, + ) -> Callable[[asset_service.QueryAssetsRequest], Awaitable[asset_service.QueryAssetsResponse]]: r"""Return a callable for the query assets method over gRPC. Issue a job that queries assets using a SQL statement compatible @@ -752,18 +759,18 @@ def query_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/QueryAssets', + if "query_assets" not in self._stubs: + self._stubs["query_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/QueryAssets", request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, ) - return self._stubs['query_assets'] + return self._stubs["query_assets"] @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def create_saved_query( + self, + ) -> Callable[[asset_service.CreateSavedQueryRequest], Awaitable[asset_service.SavedQuery]]: r"""Return a callable for the create saved query method over gRPC. Creates a saved query in a parent @@ -779,18 +786,18 @@ def create_saved_query(self) -> Callable[ # the request. 
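The long-running methods (export_assets, analyze_iam_policy_longrunning) intentionally surface the raw operations_pb2.Operation; result parsing and polling are layered on elsewhere, for example through the cached operations client shown earlier. A hedged sketch, in which the empty request and the polling call are placeholders rather than anything this patch prescribes:

    from google.cloud.asset_v1.types import asset_service


    async def start_analysis(transport) -> str:
        request = asset_service.AnalyzeIamPolicyLongrunningRequest()
        # The stub returns the raw long-running Operation, not a parsed result.
        operation = await transport.analyze_iam_policy_longrunning(request)
        # Progress can then be checked via the cached OperationsAsyncClient.
        latest = await transport.operations_client.get_operation(name=operation.name)
        return latest.name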
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + if "create_saved_query" not in self._stubs: + self._stubs["create_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/CreateSavedQuery", request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['create_saved_query'] + return self._stubs["create_saved_query"] @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def get_saved_query( + self, + ) -> Callable[[asset_service.GetSavedQueryRequest], Awaitable[asset_service.SavedQuery]]: r"""Return a callable for the get saved query method over gRPC. Gets details about a saved query. @@ -805,18 +812,18 @@ def get_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/GetSavedQuery', + if "get_saved_query" not in self._stubs: + self._stubs["get_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/GetSavedQuery", request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['get_saved_query'] + return self._stubs["get_saved_query"] @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - Awaitable[asset_service.ListSavedQueriesResponse]]: + def list_saved_queries( + self, + ) -> Callable[[asset_service.ListSavedQueriesRequest], Awaitable[asset_service.ListSavedQueriesResponse]]: r"""Return a callable for the list saved queries method over gRPC. Lists all saved queries in a parent @@ -832,18 +839,18 @@ def list_saved_queries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/ListSavedQueries', + if "list_saved_queries" not in self._stubs: + self._stubs["list_saved_queries"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/ListSavedQueries", request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, ) - return self._stubs['list_saved_queries'] + return self._stubs["list_saved_queries"] @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - Awaitable[asset_service.SavedQuery]]: + def update_saved_query( + self, + ) -> Callable[[asset_service.UpdateSavedQueryRequest], Awaitable[asset_service.SavedQuery]]: r"""Return a callable for the update saved query method over gRPC. Updates a saved query. @@ -858,18 +865,18 @@ def update_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + if "update_saved_query" not in self._stubs: + self._stubs["update_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/UpdateSavedQuery", request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, ) - return self._stubs['update_saved_query'] + return self._stubs["update_saved_query"] @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - Awaitable[empty_pb2.Empty]]: + def delete_saved_query( + self, + ) -> Callable[[asset_service.DeleteSavedQueryRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete saved query method over gRPC. Deletes a saved query. @@ -884,18 +891,18 @@ def delete_saved_query(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + if "delete_saved_query" not in self._stubs: + self._stubs["delete_saved_query"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/DeleteSavedQuery", request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_saved_query'] + return self._stubs["delete_saved_query"] @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: + def batch_get_effective_iam_policies( + self, + ) -> Callable[[asset_service.BatchGetEffectiveIamPoliciesRequest], Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: r"""Return a callable for the batch get effective iam policies method over gRPC. @@ -911,18 +918,18 @@ def batch_get_effective_iam_policies(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + if "batch_get_effective_iam_policies" not in self._stubs: + self._stubs["batch_get_effective_iam_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies", request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, ) - return self._stubs['batch_get_effective_iam_policies'] + return self._stubs["batch_get_effective_iam_policies"] @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: + def analyze_org_policies( + self, + ) -> Callable[[asset_service.AnalyzeOrgPoliciesRequest], Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: r"""Return a callable for the analyze org policies method over gRPC. Analyzes organization policies under a scope. @@ -937,18 +944,18 @@ def analyze_org_policies(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + if "analyze_org_policies" not in self._stubs: + self._stubs["analyze_org_policies"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies", request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, ) - return self._stubs['analyze_org_policies'] + return self._stubs["analyze_org_policies"] @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: + def analyze_org_policy_governed_containers( + self, + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedContainersRequest], Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: r"""Return a callable for the analyze org policy governed containers method over gRPC. @@ -965,18 +972,18 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + if "analyze_org_policy_governed_containers" not in self._stubs: + self._stubs["analyze_org_policy_governed_containers"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers", request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_containers'] + return self._stubs["analyze_org_policy_governed_containers"] @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: + def analyze_org_policy_governed_assets( + self, + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: r"""Return a callable for the analyze org policy governed assets method over gRPC. @@ -1010,16 +1017,16 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( - '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + if "analyze_org_policy_governed_assets" not in self._stubs: + self._stubs["analyze_org_policy_governed_assets"] = self._logged_channel.unary_unary( + "/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets", request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, ) - return self._stubs['analyze_org_policy_governed_assets'] + return self._stubs["analyze_org_policy_governed_assets"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.export_assets: self._wrap_method( self.export_assets, @@ -1228,8 +1235,7 @@ def kind(self) -> str: def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1243,6 +1249,4 @@ def get_operation( return self._stubs["get_operation"] -__all__ = ( - 'AssetServiceGrpcAsyncIOTransport', -) +__all__ = ("AssetServiceGrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index b7f32b5fc7..f1c9f95afa 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -49,6 +49,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -261,7 +262,12 @@ def post_update_saved_query(self, response): """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_analyze_iam_policy( + self, + request: asset_service.AnalyzeIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy Override in a subclass to manipulate the request or metadata @@ -269,7 +275,10 @@ def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, """ return request, metadata - def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: + def post_analyze_iam_policy( + self, + response: asset_service.AnalyzeIamPolicyResponse, + ) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy DEPRECATED. 
Please use the `post_analyze_iam_policy_with_metadata` @@ -282,7 +291,11 @@ def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyRespon """ return response - def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_with_metadata( + self, + response: asset_service.AnalyzeIamPolicyResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_iam_policy Override in a subclass to read or manipulate the response or metadata after it @@ -297,7 +310,11 @@ def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeI """ return response, metadata - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_iam_policy_longrunning( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to manipulate the request or metadata @@ -305,7 +322,10 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo """ return request, metadata - def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_analyze_iam_policy_longrunning( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning DEPRECATED. 
Please use the `post_analyze_iam_policy_longrunning_with_metadata` @@ -318,7 +338,11 @@ def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation """ return response - def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_iam_policy_longrunning_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to read or manipulate the response or metadata after it @@ -333,7 +357,11 @@ def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations """ return response, metadata - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_move( + self, + request: asset_service.AnalyzeMoveRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_move Override in a subclass to manipulate the request or metadata @@ -341,7 +369,10 @@ def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: """ return request, metadata - def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: + def post_analyze_move( + self, + response: asset_service.AnalyzeMoveResponse, + ) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move DEPRECATED. 
Please use the `post_analyze_move_with_metadata` @@ -354,7 +385,11 @@ def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asse """ return response - def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_move_with_metadata( + self, + response: asset_service.AnalyzeMoveResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_move Override in a subclass to read or manipulate the response or metadata after it @@ -369,7 +404,11 @@ def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveRes """ return response, metadata - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policies( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policies Override in a subclass to manipulate the request or metadata @@ -377,7 +416,10 @@ def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequ """ return request, metadata - def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: + def post_analyze_org_policies( + self, + response: asset_service.AnalyzeOrgPoliciesResponse, + ) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies DEPRECATED. 
Please use the `post_analyze_org_policies_with_metadata` @@ -390,7 +432,11 @@ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesRe """ return response - def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policies_with_metadata( + self, + response: asset_service.AnalyzeOrgPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_org_policies Override in a subclass to read or manipulate the response or metadata after it @@ -405,7 +451,11 @@ def post_analyze_org_policies_with_metadata(self, response: asset_service.Analyz """ return response, metadata - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_assets( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to manipulate the request or metadata @@ -413,7 +463,10 @@ def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeO """ return request, metadata - def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + def post_analyze_org_policy_governed_assets( + self, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets DEPRECATED. 
Please use the `post_analyze_org_policy_governed_assets_with_metadata` @@ -426,7 +479,11 @@ def post_analyze_org_policy_governed_assets(self, response: asset_service.Analyz """ return response - def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_assets_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to read or manipulate the response or metadata after it @@ -441,7 +498,11 @@ def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_ """ return response, metadata - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_analyze_org_policy_governed_containers( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to manipulate the request or metadata @@ -449,7 +510,10 @@ def pre_analyze_org_policy_governed_containers(self, request: asset_service.Anal """ return request, metadata - def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + def post_analyze_org_policy_governed_containers( + self, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers DEPRECATED. 
Please use the `post_analyze_org_policy_governed_containers_with_metadata` @@ -462,7 +526,11 @@ def post_analyze_org_policy_governed_containers(self, response: asset_service.An """ return response - def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_analyze_org_policy_governed_containers_with_metadata( + self, + response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to read or manipulate the response or metadata after it @@ -477,7 +545,11 @@ def post_analyze_org_policy_governed_containers_with_metadata(self, response: as """ return response, metadata - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_batch_get_assets_history( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_assets_history Override in a subclass to manipulate the request or metadata @@ -485,7 +557,10 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist """ return request, metadata - def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: + def post_batch_get_assets_history( + self, + response: asset_service.BatchGetAssetsHistoryResponse, + ) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history DEPRECATED. 
Please use the `post_batch_get_assets_history_with_metadata` @@ -498,7 +573,11 @@ def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHi """ return response - def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_assets_history_with_metadata( + self, + response: asset_service.BatchGetAssetsHistoryResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for batch_get_assets_history Override in a subclass to read or manipulate the response or metadata after it @@ -513,7 +592,11 @@ def post_batch_get_assets_history_with_metadata(self, response: asset_service.Ba """ return response, metadata - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_batch_get_effective_iam_policies( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to manipulate the request or metadata @@ -521,7 +604,10 @@ def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEf """ return request, metadata - def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + def post_batch_get_effective_iam_policies( + self, + response: asset_service.BatchGetEffectiveIamPoliciesResponse, + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies DEPRECATED. 
Please use the `post_batch_get_effective_iam_policies_with_metadata` @@ -534,7 +620,11 @@ def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGet """ return response - def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_batch_get_effective_iam_policies_with_metadata( + self, + response: asset_service.BatchGetEffectiveIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -549,7 +639,11 @@ def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_se """ return response, metadata - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_feed( + self, + request: asset_service.CreateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_feed Override in a subclass to manipulate the request or metadata @@ -557,7 +651,10 @@ def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Se """ return request, metadata - def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: + def post_create_feed( + self, + response: asset_service.Feed, + ) -> asset_service.Feed: """Post-rpc interceptor for create_feed DEPRECATED. 
Please use the `post_create_feed_with_metadata` @@ -570,7 +667,11 @@ def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_feed Override in a subclass to read or manipulate the response or metadata after it @@ -585,7 +686,11 @@ def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_saved_query( + self, + request: asset_service.CreateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_saved_query Override in a subclass to manipulate the request or metadata @@ -593,7 +698,10 @@ def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, """ return request, metadata - def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_create_saved_query( + self, + response: asset_service.SavedQuery, + ) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query DEPRECATED. 
Please use the `post_create_saved_query_with_metadata` @@ -606,7 +714,11 @@ def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -621,7 +733,11 @@ def post_create_saved_query_with_metadata(self, response: asset_service.SavedQue """ return response, metadata - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_feed( + self, + request: asset_service.DeleteFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_feed Override in a subclass to manipulate the request or metadata @@ -629,7 +745,11 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_saved_query( + self, + request: asset_service.DeleteSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_saved_query Override in a subclass to manipulate the request or metadata @@ -637,7 +757,11 @@ def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, """ return request, metadata - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_export_assets( + self, + request: asset_service.ExportAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_assets Override in a subclass to manipulate the request or metadata @@ -645,7 +769,10 @@ def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata """ return request, metadata - def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_export_assets( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets DEPRECATED. 
Please use the `post_export_assets_with_metadata` @@ -658,7 +785,11 @@ def post_export_assets(self, response: operations_pb2.Operation) -> operations_p """ return response - def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_export_assets_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_assets Override in a subclass to read or manipulate the response or metadata after it @@ -673,7 +804,11 @@ def post_export_assets_with_metadata(self, response: operations_pb2.Operation, m """ return response, metadata - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_feed( + self, + request: asset_service.GetFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed Override in a subclass to manipulate the request or metadata @@ -681,7 +816,10 @@ def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence """ return request, metadata - def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: + def post_get_feed( + self, + response: asset_service.Feed, + ) -> asset_service.Feed: """Post-rpc interceptor for get_feed DEPRECATED. Please use the `post_get_feed_with_metadata` @@ -694,7 +832,11 @@ def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_feed Override in a subclass to read or manipulate the response or metadata after it @@ -709,7 +851,11 @@ def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Se """ return response, metadata - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_saved_query( + self, + request: asset_service.GetSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_saved_query Override in a subclass to manipulate the request or metadata @@ -717,7 +863,10 @@ def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metad """ return request, metadata - def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_get_saved_query( + self, + response: asset_service.SavedQuery, + ) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query DEPRECATED. 
Please use the `post_get_saved_query_with_metadata` @@ -730,7 +879,11 @@ def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_serv """ return response - def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -745,7 +898,11 @@ def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, """ return response, metadata - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_assets( + self, + request: asset_service.ListAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_assets Override in a subclass to manipulate the request or metadata @@ -753,7 +910,10 @@ def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Se """ return request, metadata - def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: + def post_list_assets( + self, + response: asset_service.ListAssetsResponse, + ) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets DEPRECATED. 
Please use the `post_list_assets_with_metadata` @@ -766,7 +926,11 @@ def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_ """ return response - def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_assets_with_metadata( + self, + response: asset_service.ListAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_assets Override in a subclass to read or manipulate the response or metadata after it @@ -781,7 +945,11 @@ def post_list_assets_with_metadata(self, response: asset_service.ListAssetsRespo """ return response, metadata - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_feeds( + self, + request: asset_service.ListFeedsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds Override in a subclass to manipulate the request or metadata @@ -789,7 +957,10 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ """ return request, metadata - def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: + def post_list_feeds( + self, + response: asset_service.ListFeedsResponse, + ) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds DEPRECATED. 
Please use the `post_list_feeds_with_metadata` @@ -802,7 +973,11 @@ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_se """ return response - def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_feeds_with_metadata( + self, + response: asset_service.ListFeedsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_feeds Override in a subclass to read or manipulate the response or metadata after it @@ -817,7 +992,11 @@ def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsRespons """ return response, metadata - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_saved_queries( + self, + request: asset_service.ListSavedQueriesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_saved_queries Override in a subclass to manipulate the request or metadata @@ -825,7 +1004,10 @@ def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, """ return request, metadata - def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: + def post_list_saved_queries( + self, + response: asset_service.ListSavedQueriesResponse, + ) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries DEPRECATED. 
Please use the `post_list_saved_queries_with_metadata` @@ -838,7 +1020,11 @@ def post_list_saved_queries(self, response: asset_service.ListSavedQueriesRespon """ return response - def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_saved_queries_with_metadata( + self, + response: asset_service.ListSavedQueriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_saved_queries Override in a subclass to read or manipulate the response or metadata after it @@ -853,7 +1039,11 @@ def post_list_saved_queries_with_metadata(self, response: asset_service.ListSave """ return response, metadata - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_query_assets( + self, + request: asset_service.QueryAssetsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for query_assets Override in a subclass to manipulate the request or metadata @@ -861,7 +1051,10 @@ def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: """ return request, metadata - def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: + def post_query_assets( + self, + response: asset_service.QueryAssetsResponse, + ) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets DEPRECATED. 
Please use the `post_query_assets_with_metadata` @@ -874,7 +1067,11 @@ def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asse """ return response - def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_query_assets_with_metadata( + self, + response: asset_service.QueryAssetsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for query_assets Override in a subclass to read or manipulate the response or metadata after it @@ -889,7 +1086,11 @@ def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsRes """ return response, metadata - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_iam_policies( + self, + request: asset_service.SearchAllIamPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_iam_policies Override in a subclass to manipulate the request or metadata @@ -897,7 +1098,10 @@ def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPolicie """ return request, metadata - def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: + def post_search_all_iam_policies( + self, + response: asset_service.SearchAllIamPoliciesResponse, + ) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies DEPRECATED. 
Please use the `post_search_all_iam_policies_with_metadata` @@ -910,7 +1114,11 @@ def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPolic """ return response - def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_iam_policies_with_metadata( + self, + response: asset_service.SearchAllIamPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for search_all_iam_policies Override in a subclass to read or manipulate the response or metadata after it @@ -925,7 +1133,11 @@ def post_search_all_iam_policies_with_metadata(self, response: asset_service.Sea """ return response, metadata - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_search_all_resources( + self, + request: asset_service.SearchAllResourcesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_resources Override in a subclass to manipulate the request or metadata @@ -933,7 +1145,10 @@ def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequ """ return request, metadata - def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: + def post_search_all_resources( + self, + response: asset_service.SearchAllResourcesResponse, + ) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources DEPRECATED. 
Please use the `post_search_all_resources_with_metadata` @@ -946,7 +1161,11 @@ def post_search_all_resources(self, response: asset_service.SearchAllResourcesRe """ return response - def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_search_all_resources_with_metadata( + self, + response: asset_service.SearchAllResourcesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for search_all_resources Override in a subclass to read or manipulate the response or metadata after it @@ -961,7 +1180,11 @@ def post_search_all_resources_with_metadata(self, response: asset_service.Search """ return response, metadata - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_feed( + self, + request: asset_service.UpdateFeedRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_feed Override in a subclass to manipulate the request or metadata @@ -969,7 +1192,10 @@ def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Se """ return request, metadata - def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: + def post_update_feed( + self, + response: asset_service.Feed, + ) -> asset_service.Feed: """Post-rpc interceptor for update_feed DEPRECATED. 
Please use the `post_update_feed_with_metadata` @@ -982,7 +1208,11 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_feed_with_metadata( + self, + response: asset_service.Feed, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_feed Override in a subclass to read or manipulate the response or metadata after it @@ -997,7 +1227,11 @@ def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: """ return response, metadata - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_saved_query( + self, + request: asset_service.UpdateSavedQueryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_saved_query Override in a subclass to manipulate the request or metadata @@ -1005,7 +1239,10 @@ def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, """ return request, metadata - def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + def post_update_saved_query( + self, + response: asset_service.SavedQuery, + ) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query DEPRECATED. 
Please use the `post_update_saved_query_with_metadata` @@ -1018,7 +1255,11 @@ def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_saved_query_with_metadata( + self, + response: asset_service.SavedQuery, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_saved_query Override in a subclass to read or manipulate the response or metadata after it @@ -1034,7 +1275,9 @@ def post_update_saved_query_with_metadata(self, response: asset_service.SavedQue return response, metadata def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation @@ -1044,7 +1287,8 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -1074,29 +1318,30 @@ class AssetServiceRestTransport(_BaseAssetServiceRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[AssetServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AssetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). + The hostname to connect to (default: "cloudasset.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -1135,10 +1380,9 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -1155,21 +1399,22 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -1188,26 +1433,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeIamPolicyResponse: + def __call__( + self, + request: asset_service.AnalyzeIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. 
Args: @@ -1238,21 +1485,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", "httpRequest": http_request, @@ -1261,7 +1508,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1283,13 +1537,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", "metadata": http_response["headers"], @@ -1310,29 +1564,30 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - r"""Call the analyze iam policy - longrunning method over HTTP. + def __call__( + self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the analyze iam policy longrunning method over HTTP. 
Args: request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): @@ -1365,21 +1620,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "httpRequest": http_request, @@ -1388,7 +1643,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1408,13 +1671,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", "metadata": http_response["headers"], @@ -1435,26 +1698,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeMoveRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeMoveResponse: + def __call__( + self, + request: asset_service.AnalyzeMoveRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeMoveResponse: r"""Call the analyze move method over HTTP. 
Args: @@ -1485,21 +1750,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "httpRequest": http_request, @@ -1508,7 +1773,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeMove._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1530,13 +1802,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", "metadata": http_response["headers"], @@ -1557,26 +1829,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPoliciesResponse: + def __call__( + self, + request: asset_service.AnalyzeOrgPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPoliciesResponse: r"""Call the analyze org policies method over HTTP. 
Args: @@ -1607,21 +1881,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "httpRequest": http_request, @@ -1630,7 +1904,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1652,13 +1933,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", "metadata": http_response["headers"], @@ -1679,28 +1960,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: - r"""Call the analyze org policy - governed assets method over HTTP. + def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + r"""Call the analyze org policy governed assets method over HTTP. 
Args: request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): @@ -1730,21 +2012,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "httpRequest": http_request, @@ -1753,7 +2035,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1775,13 +2064,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", "metadata": http_response["headers"], @@ -1802,28 +2091,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: - r"""Call the analyze org policy - governed containers method over HTTP. 
+ def __call__( + self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + r"""Call the analyze org policy governed containers method over HTTP. Args: request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): @@ -1853,21 +2143,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", "httpRequest": http_request, @@ -1876,7 +2166,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
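The pre_*/post_* hooks reformatted throughout the interceptor class above are meant to be overridden in a subclass and handed to the REST transport. A minimal sketch, assuming the generated google.cloud.asset_v1 package is installed and Application Default Credentials are available; the header name is made up:

    from google.cloud import asset_v1
    from google.cloud.asset_v1.services.asset_service.transports import (
        AssetServiceRestInterceptor,
        AssetServiceRestTransport,
    )

    class CustomInterceptor(AssetServiceRestInterceptor):
        def pre_list_assets(self, request, metadata):
            # Runs before the HTTP request is built; metadata is sent as headers.
            return request, list(metadata) + [("x-example-header", "demo")]

        def post_list_assets_with_metadata(self, response, metadata):
            # Runs after the response is decoded; supersedes the deprecated
            # post_list_assets hook, which fires first.
            print(f"list_assets page held {len(response.assets)} assets")
            return response, metadata

    client = asset_v1.AssetServiceClient(
        transport=AssetServiceRestTransport(interceptor=CustomInterceptor())
    )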
@@ -1898,13 +2195,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", "metadata": http_response["headers"], @@ -1925,26 +2222,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: + def __call__( + self, + request: asset_service.BatchGetAssetsHistoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. Args: @@ -1972,21 +2271,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "httpRequest": http_request, @@ -1995,7 +2294,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
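Every _get_response helper in this file reads "uri", "method", and the query params out of a transcoded_request. That dict comes from matching the request against the method's http options; the sketch below reproduces the shape with google.api_core.path_template.transcode, using a hypothetical http rule and field values:

    from google.api_core import path_template

    # Hypothetical rule mirroring the generated _BaseListAssets options.
    http_options = [{"method": "get", "uri": "/v1/{parent=*/*}/assets"}]

    transcoded = path_template.transcode(
        http_options, parent="projects/my-project", page_size=10
    )
    assert transcoded["method"] == "get"
    assert transcoded["uri"] == "/v1/projects/my-project/assets"
    assert transcoded["query_params"] == {"page_size": 10}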
@@ -2017,13 +2323,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", "metadata": http_response["headers"], @@ -2044,28 +2350,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: - r"""Call the batch get effective iam - policies method over HTTP. + def __call__( + self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Call the batch get effective iam policies method over HTTP. Args: request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): @@ -2095,21 +2402,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "httpRequest": http_request, @@ -2118,7 +2425,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
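Once transcoded, the leftover request fields are serialized with rest_helpers.flatten_query_params(..., strict=True): nested messages are dot-flattened and repeated fields emit one pair per element. A small illustration with made-up values (the exact string casts under strict=True are an assumption):

    from google.api_core import rest_helpers

    params = {"read_time": {"seconds": 1700000000}, "asset_names": ["a", "b"]}
    print(rest_helpers.flatten_query_params(params, strict=True))
    # Roughly: [('read_time.seconds', '1700000000'),
    #           ('asset_names', 'a'), ('asset_names', 'b')]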
@@ -2140,13 +2454,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", "metadata": http_response["headers"], @@ -2167,27 +2481,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.CreateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the create feed method over HTTP. Args: @@ -2225,21 +2541,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "httpRequest": http_request, @@ -2248,7 +2564,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
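The _CreateFeed plumbing above is normally reached through the public client rather than called directly. A usage sketch; the project, feed, and topic names are placeholders, and transport="rest" selects the transport defined in this file:

    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(transport="rest")
    feed = client.create_feed(
        request={
            "parent": "projects/my-project",
            "feed_id": "my-feed",
            "feed": {
                "name": "projects/my-project/feeds/my-feed",
                "asset_types": ["compute.googleapis.com/Instance"],
                "feed_output_config": {
                    "pubsub_destination": {
                        "topic": "projects/my-project/topics/my-topic"
                    }
                },
            },
        }
    )
    print(feed.name)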
@@ -2270,13 +2594,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateFeed", "metadata": http_response["headers"], @@ -2297,27 +2621,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.CreateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.CreateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the create saved query method over HTTP. Args: @@ -2349,21 +2675,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "httpRequest": http_request, @@ -2372,7 +2698,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._CreateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
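The httpRequest/httpResponse debug records in these hunks fire only when CLIENT_LOGGING_SUPPORTED is true and the module logger is enabled for DEBUG. One way to surface them; note that GOOGLE_SDK_PYTHON_LOGGING_SCOPE is honored only by google-api-core versions that ship client_logging, so treat that route as an assumption:

    import logging

    # Standard-library route: attach a handler and lower the level for the
    # generated package's logger hierarchy.
    logging.basicConfig(level=logging.INFO)
    logging.getLogger("google.cloud.asset_v1").setLevel(logging.DEBUG)

    # Assumed alternative: set GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.asset_v1
    # in the environment before the process starts.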
@@ -2394,13 +2728,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", "metadata": http_response["headers"], @@ -2421,26 +2755,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: asset_service.DeleteFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete feed method over HTTP. Args: @@ -2464,21 +2800,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteFeed", "httpRequest": http_request, @@ -2487,7 +2823,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2506,26 +2849,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.DeleteSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ): + def __call__( + self, + request: asset_service.DeleteSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): r"""Call the delete saved query method over HTTP. Args: @@ -2549,21 +2894,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteSavedQuery", "httpRequest": http_request, @@ -2572,7 +2917,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._DeleteSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
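_DeleteFeed and _DeleteSavedQuery return no payload, and any non-2xx status is raised from the raw HTTP response as a core_exceptions.GoogleAPICallError subclass. A handling sketch with a placeholder feed name:

    from google.api_core import exceptions as core_exceptions
    from google.cloud import asset_v1

    client = asset_v1.AssetServiceClient(transport="rest")
    try:
        client.delete_feed(name="projects/my-project/feeds/my-feed")
    except core_exceptions.NotFound:
        # A 404 from the service surfaces as NotFound.
        pass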
@@ -2591,27 +2943,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: asset_service.ExportAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. Args: @@ -2644,21 +2998,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "httpRequest": http_request, @@ -2667,7 +3021,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._ExportAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2687,13 +3049,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", "metadata": http_response["headers"], @@ -2714,26 +3076,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.GetFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the get feed method over HTTP. Args: @@ -2769,21 +3133,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "httpRequest": http_request, @@ -2792,7 +3156,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2814,13 +3185,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", "metadata": http_response["headers"], @@ -2841,26 +3212,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.GetSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.GetSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the get saved query method over HTTP. Args: @@ -2890,21 +3263,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "httpRequest": http_request, @@ -2913,7 +3286,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
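A detail worth noticing across these hunks: some stubs serialize the request with json_format.MessageToJson(request), others with type(request).to_json(request). The latter is the proto-plus wrapper path (the asset_service.* types), the former the raw protobuf path (for example operations_pb2 types); both sit inside a blanket except so logging can never break the RPC. A combined sketch, assuming proto-plus message classes expose to_json (true for proto.Message, but hedged here):

from google.protobuf import json_format

def best_effort_request_payload(request):
    try:
        if hasattr(type(request), "to_json"):
            # proto-plus wrapper types serialize via the class-level to_json.
            return type(request).to_json(request)
        # Raw protobuf messages go through json_format.
        return json_format.MessageToJson(request)
    except Exception:
        # Never let debug logging take down the actual call.
        return None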
@@ -2935,13 +3315,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", "metadata": http_response["headers"], @@ -2962,26 +3342,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListAssetsResponse: + def __call__( + self, + request: asset_service.ListAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. Args: @@ -3009,21 +3391,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "httpRequest": http_request, @@ -3032,7 +3414,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3054,13 +3443,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", "metadata": http_response["headers"], @@ -3081,26 +3470,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListFeedsResponse: + def __call__( + self, + request: asset_service.ListFeedsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. Args: @@ -3128,21 +3519,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "httpRequest": http_request, @@ -3151,7 +3542,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListFeeds._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3173,13 +3571,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", "metadata": http_response["headers"], @@ -3200,26 +3598,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.ListSavedQueriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.ListSavedQueriesResponse: + def __call__( + self, + request: asset_service.ListSavedQueriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.ListSavedQueriesResponse: r"""Call the list saved queries method over HTTP. Args: @@ -3247,21 +3647,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "httpRequest": http_request, @@ -3270,7 +3670,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._ListSavedQueries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3292,13 +3699,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", "metadata": http_response["headers"], @@ -3319,27 +3726,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.QueryAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.QueryAssetsResponse: + def __call__( + self, + request: asset_service.QueryAssetsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.QueryAssetsResponse: r"""Call the query assets method over HTTP. Args: @@ -3369,21 +3778,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "httpRequest": http_request, @@ -3392,7 +3801,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._QueryAssets._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
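QueryAssets, in the hunk below, is one of the RPCs that carries a JSON body, so its _get_response threads data=body through to the session call, where the body-less stubs above omit it. The transcoded request consumed by all of these helpers is just a dict; an illustrative example of its shape, under the assumption that the keys seen in the hunks (uri, method, query_params, body) are the complete set:

# Purely illustrative values; real dicts come from HTTP-rule transcoding
# in google.api_core, not from hand-written literals like these.
transcoded_list = {
    "method": "get",
    "uri": "/v1/projects/my-project/assets",
    "query_params": {"pageSize": 100},  # flattened into ?pageSize=100
}
transcoded_query = {
    "method": "post",
    "uri": "/v1/projects/my-project:queryAssets",
    "query_params": {},
    # _get_request_body_json produces a JSON string, so "body" is a str.
    "body": '{"statement": "SELECT name FROM compute_googleapis_com_Instance"}',
}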
@@ -3414,13 +3831,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", "metadata": http_response["headers"], @@ -3441,26 +3858,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllIamPoliciesResponse: + def __call__( + self, + request: asset_service.SearchAllIamPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. Args: @@ -3488,21 +3907,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "httpRequest": http_request, @@ -3511,7 +3930,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllIamPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3533,13 +3959,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", "metadata": http_response["headers"], @@ -3560,26 +3986,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SearchAllResourcesResponse: + def __call__( + self, + request: asset_service.SearchAllResourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. Args: @@ -3607,21 +4035,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "httpRequest": http_request, @@ -3630,7 +4058,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._SearchAllResources._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3652,13 +4087,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", "metadata": http_response["headers"], @@ -3679,27 +4114,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.Feed: + def __call__( + self, + request: asset_service.UpdateFeedRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.Feed: r"""Call the update feed method over HTTP. Args: @@ -3737,21 +4174,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "httpRequest": http_request, @@ -3760,7 +4197,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateFeed._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
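Every __call__ in this file ends with the comment about raising "the appropriate core_exceptions.GoogleAPICallError exception subclass"; the branch itself is elided from these hunks. For orientation, a sketch of what that step typically looks like, using google.api_core.exceptions.from_http_response (a real helper, though its use at this exact point is my assumption, not shown in the diff):

from google.api_core import exceptions as core_exceptions

def raise_for_status(response):
    # Map a non-2xx requests.Response to the matching GoogleAPICallError
    # subclass (404 -> NotFound, 403 -> PermissionDenied, and so on).
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)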
@@ -3782,13 +4227,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", "metadata": http_response["headers"], @@ -3809,27 +4254,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: asset_service.UpdateSavedQueryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> asset_service.SavedQuery: + def __call__( + self, + request: asset_service.UpdateSavedQueryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> asset_service.SavedQuery: r"""Call the update saved query method over HTTP. Args: @@ -3861,21 +4308,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "httpRequest": http_request, @@ -3884,7 +4331,15 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = AssetServiceRestTransport._UpdateSavedQuery._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
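The property accessors that follow are each wrapped in a # fmt: off / # fmt: on pair, which is how the change keeps the formatter away from the generator's hand-rolled layout (assuming Black is the formatter these pragmas target). A tiny demonstration of the pragma pair:

# fmt: off
IDENTITY_2X2 = [
    1, 0,
    0, 1,
]
# fmt: on
# Black reformats nothing between the markers, so the two-column layout
# above survives a formatting pass untouched.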
@@ -3906,13 +4361,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", "metadata": http_response["headers"], @@ -3921,193 +4376,239 @@ def __call__(self, ) return resp + # fmt: off @property - def analyze_iam_policy(self) -> Callable[ - [asset_service.AnalyzeIamPolicyRequest], - asset_service.AnalyzeIamPolicyResponse]: + def analyze_iam_policy( + self + ) -> Callable[[asset_service.AnalyzeIamPolicyRequest], asset_service.AnalyzeIamPolicyResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def analyze_iam_policy_longrunning(self) -> Callable[ - [asset_service.AnalyzeIamPolicyLongrunningRequest], - operations_pb2.Operation]: + def analyze_iam_policy_longrunning( + self + ) -> Callable[[asset_service.AnalyzeIamPolicyLongrunningRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def analyze_move(self) -> Callable[ - [asset_service.AnalyzeMoveRequest], - asset_service.AnalyzeMoveResponse]: + def analyze_move( + self + ) -> Callable[[asset_service.AnalyzeMoveRequest], asset_service.AnalyzeMoveResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def analyze_org_policies(self) -> Callable[ - [asset_service.AnalyzeOrgPoliciesRequest], - asset_service.AnalyzeOrgPoliciesResponse]: + def analyze_org_policies( + self + ) -> Callable[[asset_service.AnalyzeOrgPoliciesRequest], asset_service.AnalyzeOrgPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def analyze_org_policy_governed_assets(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], - asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + def analyze_org_policy_governed_assets( + self + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def analyze_org_policy_governed_containers(self) -> Callable[ - [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], - asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + def analyze_org_policy_governed_containers( + self + ) -> Callable[[asset_service.AnalyzeOrgPolicyGovernedContainersRequest], asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def batch_get_assets_history(self) -> Callable[ - [asset_service.BatchGetAssetsHistoryRequest], - asset_service.BatchGetAssetsHistoryResponse]: + def batch_get_assets_history( + self + ) -> Callable[[asset_service.BatchGetAssetsHistoryRequest], asset_service.BatchGetAssetsHistoryResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def batch_get_effective_iam_policies(self) -> Callable[ - [asset_service.BatchGetEffectiveIamPoliciesRequest], - asset_service.BatchGetEffectiveIamPoliciesResponse]: + def batch_get_effective_iam_policies( + self + ) -> Callable[[asset_service.BatchGetEffectiveIamPoliciesRequest], asset_service.BatchGetEffectiveIamPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def create_feed(self) -> Callable[ - [asset_service.CreateFeedRequest], - asset_service.Feed]: + def create_feed( + self + ) -> Callable[[asset_service.CreateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def create_saved_query(self) -> Callable[ - [asset_service.CreateSavedQueryRequest], - asset_service.SavedQuery]: + def create_saved_query( + self + ) -> Callable[[asset_service.CreateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_feed(self) -> Callable[ - [asset_service.DeleteFeedRequest], - empty_pb2.Empty]: + def delete_feed( + self + ) -> Callable[[asset_service.DeleteFeedRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_saved_query(self) -> Callable[ - [asset_service.DeleteSavedQueryRequest], - empty_pb2.Empty]: + def delete_saved_query( + self + ) -> Callable[[asset_service.DeleteSavedQueryRequest], empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def export_assets(self) -> Callable[ - [asset_service.ExportAssetsRequest], - operations_pb2.Operation]: + def export_assets( + self + ) -> Callable[[asset_service.ExportAssetsRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_feed(self) -> Callable[ - [asset_service.GetFeedRequest], - asset_service.Feed]: + def get_feed( + self + ) -> Callable[[asset_service.GetFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_saved_query(self) -> Callable[ - [asset_service.GetSavedQueryRequest], - asset_service.SavedQuery]: + def get_saved_query( + self + ) -> Callable[[asset_service.GetSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_assets(self) -> Callable[ - [asset_service.ListAssetsRequest], - asset_service.ListAssetsResponse]: + def list_assets( + self + ) -> Callable[[asset_service.ListAssetsRequest], asset_service.ListAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_feeds(self) -> Callable[ - [asset_service.ListFeedsRequest], - asset_service.ListFeedsResponse]: + def list_feeds( + self + ) -> Callable[[asset_service.ListFeedsRequest], asset_service.ListFeedsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_saved_queries(self) -> Callable[ - [asset_service.ListSavedQueriesRequest], - asset_service.ListSavedQueriesResponse]: + def list_saved_queries( + self + ) -> Callable[[asset_service.ListSavedQueriesRequest], asset_service.ListSavedQueriesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def query_assets(self) -> Callable[ - [asset_service.QueryAssetsRequest], - asset_service.QueryAssetsResponse]: + def query_assets( + self + ) -> Callable[[asset_service.QueryAssetsRequest], asset_service.QueryAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def search_all_iam_policies(self) -> Callable[ - [asset_service.SearchAllIamPoliciesRequest], - asset_service.SearchAllIamPoliciesResponse]: + def search_all_iam_policies( + self + ) -> Callable[[asset_service.SearchAllIamPoliciesRequest], asset_service.SearchAllIamPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def search_all_resources(self) -> Callable[ - [asset_service.SearchAllResourcesRequest], - asset_service.SearchAllResourcesResponse]: + def search_all_resources( + self + ) -> Callable[[asset_service.SearchAllResourcesRequest], asset_service.SearchAllResourcesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_feed(self) -> Callable[ - [asset_service.UpdateFeedRequest], - asset_service.Feed]: + def update_feed( + self + ) -> Callable[[asset_service.UpdateFeedRequest], asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_saved_query(self) -> Callable[ - [asset_service.UpdateSavedQueryRequest], - asset_service.SavedQuery]: + def update_saved_query( + self + ) -> Callable[[asset_service.UpdateSavedQueryRequest], asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + # fmt: on @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): def __hash__(self): @@ -4121,27 +4622,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. 
Args: @@ -4168,21 +4670,21 @@ def __call__(self, query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpRequest": http_request, @@ -4191,7 +4693,14 @@ def __call__(self, ) # Send the request - response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = AssetServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4209,12 +4718,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", "httpResponse": http_response, @@ -4231,6 +4740,4 @@ def close(self): self._session.close() -__all__=( - 'AssetServiceRestTransport', -) +__all__ = ("AssetServiceRestTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index 91d6091fe1..f2ed1740df 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -42,18 +42,20 @@ class _BaseAssetServiceRestTransport(AssetServiceTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'cloudasset.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "cloudasset.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'cloudasset.googleapis.com'). + The hostname to connect to (default: "cloudasset.googleapis.com"). 
credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -84,15 +86,18 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseAnalyzeIamPolicy: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "analysisQuery" : {}, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "analysisQuery": {}, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -100,10 +105,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicy", + }, ] return http_options @@ -115,10 +121,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields(query_params)) return query_params @@ -127,8 +135,10 @@ class _BaseAnalyzeIamPolicyLongrunning: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -136,11 +146,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning", + "body": "*", + }, ] return http_options @@ -155,16 +166,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields(query_params)) return query_params @@ -173,8 +187,11 @@ class _BaseAnalyzeMove: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "destinationParent" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "destinationParent": "", + } + # fmt: on @classmethod 
def _get_unset_required_fields(cls, message_dict): @@ -182,10 +199,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=*/*}:analyzeMove', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=*/*}:analyzeMove", + }, ] return http_options @@ -197,10 +215,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields(query_params)) return query_params @@ -209,8 +229,11 @@ class _BaseAnalyzeOrgPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -218,10 +241,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicies", + }, ] return http_options @@ -233,10 +257,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields(query_params)) return query_params @@ -245,8 +271,11 @@ class _BaseAnalyzeOrgPolicyGovernedAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -254,10 +283,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets", + }, ] return http_options @@ -269,10 +299,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) 
query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields(query_params)) return query_params @@ -281,8 +313,11 @@ class _BaseAnalyzeOrgPolicyGovernedContainers: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -290,10 +325,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers", + }, ] return http_options @@ -305,10 +341,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields(query_params)) return query_params @@ -317,8 +355,10 @@ class _BaseBatchGetAssetsHistory: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -326,10 +366,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}:batchGetAssetsHistory", + }, ] return http_options @@ -341,10 +382,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields(query_params)) return query_params @@ -353,8 +396,11 @@ class _BaseBatchGetEffectiveIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "names" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -362,10 +408,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}/effectiveIamPolicies:batchGet", + }, ] return 
http_options @@ -377,10 +424,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields(query_params)) return query_params @@ -389,8 +438,10 @@ class _BaseCreateFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -398,11 +449,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/feeds", + "body": "*", + }, ] return http_options @@ -417,16 +469,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) return query_params @@ -435,8 +490,11 @@ class _BaseCreateSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "savedQueryId" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -444,11 +502,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/savedQueries', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}/savedQueries", + "body": "saved_query", + }, ] return http_options @@ -463,16 +522,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields(query_params)) return query_params @@ -481,8 
+543,10 @@ class _BaseDeleteFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -490,10 +554,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -505,10 +570,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) return query_params @@ -517,8 +584,10 @@ class _BaseDeleteSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -526,10 +595,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -541,10 +611,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields(query_params)) return query_params @@ -553,8 +625,10 @@ class _BaseExportAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -562,11 +636,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:exportAssets", + "body": "*", + }, ] return http_options @@ -581,16 +656,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields(query_params)) return query_params @@ -599,8 +677,10 @@ class _BaseGetFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -608,10 +688,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/feeds/*}", + }, ] return http_options @@ -623,10 +704,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params)) return query_params @@ -635,8 +718,10 @@ class _BaseGetSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -644,10 +729,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/savedQueries/*}", + }, ] return http_options @@ -659,10 +745,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields(query_params)) return query_params @@ -671,8 +759,10 @@ class _BaseListAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -680,10 +770,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/assets", + }, ] return http_options @@ -695,10 +786,12 @@ def 
_get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) return query_params @@ -707,8 +800,10 @@ class _BaseListFeeds: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -716,10 +811,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/feeds", + }, ] return http_options @@ -731,10 +827,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params)) return query_params @@ -743,8 +841,10 @@ class _BaseListSavedQueries: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -752,10 +852,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/savedQueries', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=*/*}/savedQueries", + }, ] return http_options @@ -767,10 +868,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields(query_params)) return query_params @@ -779,8 +882,10 @@ class _BaseQueryAssets: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -788,11 +893,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:queryAssets', - 'body': '*', - }, + http_options: 
List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=*/*}:queryAssets", + "body": "*", + }, ] return http_options @@ -807,16 +913,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields(query_params)) return query_params @@ -825,8 +934,10 @@ class _BaseSearchAllIamPolicies: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -834,10 +945,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllIamPolicies", + }, ] return http_options @@ -849,10 +961,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) return query_params @@ -861,8 +975,10 @@ class _BaseSearchAllResources: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -870,10 +986,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllResources', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{scope=*/*}:searchAllResources", + }, ] return http_options @@ -885,10 +1002,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) return query_params @@ -897,8 +1016,10 @@ class _BaseUpdateFeed: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -906,11 +1027,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{feed.name=*/*/feeds/*}', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{feed.name=*/*/feeds/*}", + "body": "*", + }, ] return http_options @@ -925,16 +1047,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params)) return query_params @@ -943,8 +1068,11 @@ class _BaseUpdateSavedQuery: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -952,11 +1080,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', - 'body': 'saved_query', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{saved_query.name=*/*/savedQueries/*}", + "body": "saved_query", + }, ] return http_options @@ -971,16 +1100,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields(query_params)) return query_params @@ -991,10 +1123,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=*/*/operations/*/**}", + }, ] return http_options @@ -1002,15 +1135,15 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def 
_get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseAssetServiceRestTransport', -) +__all__ = ("_BaseAssetServiceRestTransport",) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index d8a9b7f910..9badcfe051 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -96,81 +96,81 @@ ) __all__ = ( - 'AnalyzeIamPolicyLongrunningMetadata', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'BigQueryDestination', - 'CreateFeedRequest', - 'CreateSavedQueryRequest', - 'DeleteFeedRequest', - 'DeleteSavedQueryRequest', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'Feed', - 'FeedOutputConfig', - 'GcsDestination', - 'GcsOutputResult', - 'GetFeedRequest', - 'GetSavedQueryRequest', - 'IamPolicyAnalysisOutputConfig', - 'IamPolicyAnalysisQuery', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'OutputConfig', - 'OutputResult', - 'PartitionSpec', - 'PubsubDestination', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'SavedQuery', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'TableFieldSchema', - 'TableSchema', - 'UpdateFeedRequest', - 'UpdateSavedQueryRequest', - 'ContentType', - 'Asset', - 'AttachedResource', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', - 'IamPolicyAnalysisState', - 'IamPolicySearchResult', - 'RelatedAsset', - 'RelatedAssets', - 'RelatedResource', - 'RelatedResources', - 'RelationshipAttributes', - 'Resource', - 'ResourceSearchResult', - 'TemporalAsset', - 'TimeWindow', - 'VersionedResource', + "AnalyzeIamPolicyLongrunningMetadata", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "BigQueryDestination", + "CreateFeedRequest", + "CreateSavedQueryRequest", + "DeleteFeedRequest", + "DeleteSavedQueryRequest", + 
"ExportAssetsRequest", + "ExportAssetsResponse", + "Feed", + "FeedOutputConfig", + "GcsDestination", + "GcsOutputResult", + "GetFeedRequest", + "GetSavedQueryRequest", + "IamPolicyAnalysisOutputConfig", + "IamPolicyAnalysisQuery", + "ListAssetsRequest", + "ListAssetsResponse", + "ListFeedsRequest", + "ListFeedsResponse", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "OutputConfig", + "OutputResult", + "PartitionSpec", + "PubsubDestination", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "SavedQuery", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "TableFieldSchema", + "TableSchema", + "UpdateFeedRequest", + "UpdateSavedQueryRequest", + "ContentType", + "Asset", + "AttachedResource", + "ConditionEvaluation", + "IamPolicyAnalysisResult", + "IamPolicyAnalysisState", + "IamPolicySearchResult", + "RelatedAsset", + "RelatedAssets", + "RelatedResource", + "RelatedResources", + "RelationshipAttributes", + "Resource", + "ResourceSearchResult", + "TemporalAsset", + "TimeWindow", + "VersionedResource", ) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 3caaefee47..0f156b23b5 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -29,72 +29,74 @@ from google.type import expr_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'ContentType', - 'AnalyzeIamPolicyLongrunningMetadata', - 'ExportAssetsRequest', - 'ExportAssetsResponse', - 'ListAssetsRequest', - 'ListAssetsResponse', - 'BatchGetAssetsHistoryRequest', - 'BatchGetAssetsHistoryResponse', - 'CreateFeedRequest', - 'GetFeedRequest', - 'ListFeedsRequest', - 'ListFeedsResponse', - 'UpdateFeedRequest', - 'DeleteFeedRequest', - 'OutputConfig', - 'OutputResult', - 'GcsOutputResult', - 'GcsDestination', - 'BigQueryDestination', - 'PartitionSpec', - 'PubsubDestination', - 'FeedOutputConfig', - 'Feed', - 'SearchAllResourcesRequest', - 'SearchAllResourcesResponse', - 'SearchAllIamPoliciesRequest', - 'SearchAllIamPoliciesResponse', - 'IamPolicyAnalysisQuery', - 'AnalyzeIamPolicyRequest', - 'AnalyzeIamPolicyResponse', - 'IamPolicyAnalysisOutputConfig', - 'AnalyzeIamPolicyLongrunningRequest', - 'AnalyzeIamPolicyLongrunningResponse', - 'SavedQuery', - 'CreateSavedQueryRequest', - 'GetSavedQueryRequest', - 'ListSavedQueriesRequest', - 'ListSavedQueriesResponse', - 'UpdateSavedQueryRequest', - 'DeleteSavedQueryRequest', - 'AnalyzeMoveRequest', - 'AnalyzeMoveResponse', - 'MoveAnalysis', - 'MoveAnalysisResult', - 'MoveImpact', - 'QueryAssetsOutputConfig', - 'QueryAssetsRequest', - 'QueryAssetsResponse', - 'QueryResult', - 'TableSchema', - 'TableFieldSchema', - 'BatchGetEffectiveIamPoliciesRequest', - 'BatchGetEffectiveIamPoliciesResponse', - 'AnalyzerOrgPolicy', - 'AnalyzerOrgPolicyConstraint', - 'AnalyzeOrgPoliciesRequest', - 'AnalyzeOrgPoliciesResponse', - 'AnalyzeOrgPolicyGovernedContainersRequest', - 'AnalyzeOrgPolicyGovernedContainersResponse', - 'AnalyzeOrgPolicyGovernedAssetsRequest', - 'AnalyzeOrgPolicyGovernedAssetsResponse', + "ContentType", + "AnalyzeIamPolicyLongrunningMetadata", + "ExportAssetsRequest", + 
"ExportAssetsResponse", + "ListAssetsRequest", + "ListAssetsResponse", + "BatchGetAssetsHistoryRequest", + "BatchGetAssetsHistoryResponse", + "CreateFeedRequest", + "GetFeedRequest", + "ListFeedsRequest", + "ListFeedsResponse", + "UpdateFeedRequest", + "DeleteFeedRequest", + "OutputConfig", + "OutputResult", + "GcsOutputResult", + "GcsDestination", + "BigQueryDestination", + "PartitionSpec", + "PubsubDestination", + "FeedOutputConfig", + "Feed", + "SearchAllResourcesRequest", + "SearchAllResourcesResponse", + "SearchAllIamPoliciesRequest", + "SearchAllIamPoliciesResponse", + "IamPolicyAnalysisQuery", + "AnalyzeIamPolicyRequest", + "AnalyzeIamPolicyResponse", + "IamPolicyAnalysisOutputConfig", + "AnalyzeIamPolicyLongrunningRequest", + "AnalyzeIamPolicyLongrunningResponse", + "SavedQuery", + "CreateSavedQueryRequest", + "GetSavedQueryRequest", + "ListSavedQueriesRequest", + "ListSavedQueriesResponse", + "UpdateSavedQueryRequest", + "DeleteSavedQueryRequest", + "AnalyzeMoveRequest", + "AnalyzeMoveResponse", + "MoveAnalysis", + "MoveAnalysisResult", + "MoveImpact", + "QueryAssetsOutputConfig", + "QueryAssetsRequest", + "QueryAssetsResponse", + "QueryResult", + "TableSchema", + "TableFieldSchema", + "BatchGetEffectiveIamPoliciesRequest", + "BatchGetEffectiveIamPoliciesResponse", + "AnalyzerOrgPolicy", + "AnalyzerOrgPolicyConstraint", + "AnalyzeOrgPoliciesRequest", + "AnalyzeOrgPoliciesResponse", + "AnalyzeOrgPolicyGovernedContainersRequest", + "AnalyzeOrgPolicyGovernedContainersResponse", + "AnalyzeOrgPolicyGovernedAssetsRequest", + "AnalyzeOrgPolicyGovernedAssetsResponse", }, ) +# fmt: on class ContentType(proto.Enum): @@ -224,15 +226,15 @@ class ExportAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='OutputConfig', + message="OutputConfig", ) relationship_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -267,15 +269,15 @@ class ExportAssetsResponse(proto.Message): number=1, message=timestamp_pb2.Timestamp, ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='OutputConfig', + message="OutputConfig", ) - output_result: 'OutputResult' = proto.Field( + output_result: "OutputResult" = proto.Field( proto.MESSAGE, number=3, - message='OutputResult', + message="OutputResult", ) @@ -368,10 +370,10 @@ class ListAssetsRequest(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) page_size: int = proto.Field( proto.INT32, @@ -479,10 +481,10 @@ class BatchGetAssetsHistoryRequest(proto.Message): proto.STRING, number=2, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=3, - enum='ContentType', + enum="ContentType", ) read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, @@ -543,10 +545,10 @@ class CreateFeedRequest(proto.Message): proto.STRING, number=2, ) - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=3, - message='Feed', + message="Feed", ) @@ -594,10 +596,10 @@ class ListFeedsResponse(proto.Message): A list of feeds. 
""" - feeds: MutableSequence['Feed'] = proto.RepeatedField( + feeds: MutableSequence["Feed"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) @@ -617,10 +619,10 @@ class UpdateFeedRequest(proto.Message): contain fields that are immutable or only set by the server. """ - feed: 'Feed' = proto.Field( + feed: "Feed" = proto.Field( proto.MESSAGE, number=1, - message='Feed', + message="Feed", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -669,17 +671,17 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination: 'GcsDestination' = proto.Field( + gcs_destination: "GcsDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='GcsDestination', + oneof="destination", + message="GcsDestination", ) - bigquery_destination: 'BigQueryDestination' = proto.Field( + bigquery_destination: "BigQueryDestination" = proto.Field( proto.MESSAGE, number=2, - oneof='destination', - message='BigQueryDestination', + oneof="destination", + message="BigQueryDestination", ) @@ -695,11 +697,11 @@ class OutputResult(proto.Message): This field is a member of `oneof`_ ``result``. """ - gcs_result: 'GcsOutputResult' = proto.Field( + gcs_result: "GcsOutputResult" = proto.Field( proto.MESSAGE, number=1, - oneof='result', - message='GcsOutputResult', + oneof="result", + message="GcsOutputResult", ) @@ -759,12 +761,12 @@ class GcsDestination(proto.Message): uri: str = proto.Field( proto.STRING, number=1, - oneof='object_uri', + oneof="object_uri", ) uri_prefix: str = proto.Field( proto.STRING, number=2, - oneof='object_uri', + oneof="object_uri", ) @@ -864,10 +866,10 @@ class BigQueryDestination(proto.Message): proto.BOOL, number=3, ) - partition_spec: 'PartitionSpec' = proto.Field( + partition_spec: "PartitionSpec" = proto.Field( proto.MESSAGE, number=4, - message='PartitionSpec', + message="PartitionSpec", ) separate_tables_per_asset_type: bool = proto.Field( proto.BOOL, @@ -884,6 +886,7 @@ class PartitionSpec(proto.Message): The partition key for BigQuery partitioned table. """ + class PartitionKey(proto.Enum): r"""This enum is used to determine the partition key column when exporting assets to BigQuery partitioned table(s). Note that, if the @@ -948,11 +951,11 @@ class FeedOutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - pubsub_destination: 'PubsubDestination' = proto.Field( + pubsub_destination: "PubsubDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='PubsubDestination', + oneof="destination", + message="PubsubDestination", ) @@ -1048,15 +1051,15 @@ class Feed(proto.Message): proto.STRING, number=3, ) - content_type: 'ContentType' = proto.Field( + content_type: "ContentType" = proto.Field( proto.ENUM, number=4, - enum='ContentType', + enum="ContentType", ) - feed_output_config: 'FeedOutputConfig' = proto.Field( + feed_output_config: "FeedOutputConfig" = proto.Field( proto.MESSAGE, number=5, - message='FeedOutputConfig', + message="FeedOutputConfig", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -1726,7 +1729,7 @@ class ConditionContext(proto.Message): access_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, - oneof='TimeContext', + oneof="TimeContext", message=timestamp_pb2.Timestamp, ) @@ -1802,10 +1805,10 @@ class AnalyzeIamPolicyRequest(proto.Message): Default is empty. 
""" - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, @@ -1858,10 +1861,10 @@ class IamPolicyAnalysis(proto.Message): the query handling. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = proto.RepeatedField( proto.MESSAGE, @@ -1982,6 +1985,7 @@ class BigQueryDestination(proto.Message): successfully. Details are at https://cloud.google.com/bigquery/docs/loading-data-local#appending_to_or_overwriting_a_table_using_a_local_file. """ + class PartitionKey(proto.Enum): r"""This enum determines the partition key column for the bigquery tables. Partitioning can improve query performance and @@ -2011,10 +2015,10 @@ class PartitionKey(proto.Enum): proto.STRING, number=2, ) - partition_key: 'IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey' = proto.Field( + partition_key: "IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey" = proto.Field( proto.ENUM, number=3, - enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', + enum="IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey", ) write_disposition: str = proto.Field( proto.STRING, @@ -2024,13 +2028,13 @@ class PartitionKey(proto.Enum): gcs_destination: GcsDestination = proto.Field( proto.MESSAGE, number=1, - oneof='destination', + oneof="destination", message=GcsDestination, ) bigquery_destination: BigQueryDestination = proto.Field( proto.MESSAGE, number=2, - oneof='destination', + oneof="destination", message=BigQueryDestination, ) @@ -2066,26 +2070,25 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): where the results will be output to. """ - analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - message='IamPolicyAnalysisQuery', + message="IamPolicyAnalysisQuery", ) saved_analysis_query: str = proto.Field( proto.STRING, number=3, ) - output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( + output_config: "IamPolicyAnalysisOutputConfig" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisOutputConfig', + message="IamPolicyAnalysisOutputConfig", ) class AnalyzeIamPolicyLongrunningResponse(proto.Message): r"""A response message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - """ @@ -2139,11 +2142,11 @@ class QueryContent(proto.Message): This field is a member of `oneof`_ ``query_content``. 
""" - iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + iam_policy_analysis_query: "IamPolicyAnalysisQuery" = proto.Field( proto.MESSAGE, number=1, - oneof='query_content', - message='IamPolicyAnalysisQuery', + oneof="query_content", + message="IamPolicyAnalysisQuery", ) name: str = proto.Field( @@ -2216,10 +2219,10 @@ class CreateSavedQueryRequest(proto.Message): proto.STRING, number=1, ) - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=2, - message='SavedQuery', + message="SavedQuery", ) saved_query_id: str = proto.Field( proto.STRING, @@ -2317,10 +2320,10 @@ class ListSavedQueriesResponse(proto.Message): def raw_page(self): return self - saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( + saved_queries: MutableSequence["SavedQuery"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) next_page_token: str = proto.Field( proto.STRING, @@ -2345,10 +2348,10 @@ class UpdateSavedQueryRequest(proto.Message): Required. The list of fields to update. """ - saved_query: 'SavedQuery' = proto.Field( + saved_query: "SavedQuery" = proto.Field( proto.MESSAGE, number=1, - message='SavedQuery', + message="SavedQuery", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2400,6 +2403,7 @@ class AnalyzeMoveRequest(proto.Message): should be included in the analysis response. If unspecified, the default view is FULL. """ + class AnalysisView(proto.Enum): r"""View enum for supporting partial analysis responses. @@ -2445,10 +2449,10 @@ class AnalyzeMoveResponse(proto.Message): services. """ - move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( + move_analysis: MutableSequence["MoveAnalysis"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveAnalysis', + message="MoveAnalysis", ) @@ -2482,16 +2486,16 @@ class MoveAnalysis(proto.Message): proto.STRING, number=1, ) - analysis: 'MoveAnalysisResult' = proto.Field( + analysis: "MoveAnalysisResult" = proto.Field( proto.MESSAGE, number=2, - oneof='result', - message='MoveAnalysisResult', + oneof="result", + message="MoveAnalysisResult", ) error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='result', + oneof="result", message=status_pb2.Status, ) @@ -2512,15 +2516,15 @@ class MoveAnalysisResult(proto.Message): but will not block moves at runtime. 
""" - blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( + blockers: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='MoveImpact', + message="MoveImpact", ) - warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( + warnings: MutableSequence["MoveImpact"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='MoveImpact', + message="MoveImpact", ) @@ -2688,12 +2692,12 @@ class QueryAssetsRequest(proto.Message): statement: str = proto.Field( proto.STRING, number=2, - oneof='query', + oneof="query", ) job_reference: str = proto.Field( proto.STRING, number=3, - oneof='query', + oneof="query", ) page_size: int = proto.Field( proto.INT32, @@ -2711,19 +2715,19 @@ class QueryAssetsRequest(proto.Message): read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, number=7, - oneof='time', + oneof="time", message=gca_assets.TimeWindow, ) read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, - oneof='time', + oneof="time", message=timestamp_pb2.Timestamp, ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=9, - message='QueryAssetsOutputConfig', + message="QueryAssetsOutputConfig", ) @@ -2776,20 +2780,20 @@ class QueryAssetsResponse(proto.Message): error: status_pb2.Status = proto.Field( proto.MESSAGE, number=3, - oneof='response', + oneof="response", message=status_pb2.Status, ) - query_result: 'QueryResult' = proto.Field( + query_result: "QueryResult" = proto.Field( proto.MESSAGE, number=4, - oneof='response', - message='QueryResult', + oneof="response", + message="QueryResult", ) - output_config: 'QueryAssetsOutputConfig' = proto.Field( + output_config: "QueryAssetsOutputConfig" = proto.Field( proto.MESSAGE, number=5, - oneof='response', - message='QueryAssetsOutputConfig', + oneof="response", + message="QueryAssetsOutputConfig", ) @@ -2821,10 +2825,10 @@ def raw_page(self): number=1, message=struct_pb2.Struct, ) - schema: 'TableSchema' = proto.Field( + schema: "TableSchema" = proto.Field( proto.MESSAGE, number=2, - message='TableSchema', + message="TableSchema", ) next_page_token: str = proto.Field( proto.STRING, @@ -2844,10 +2848,10 @@ class TableSchema(proto.Message): Describes the fields in a table. 
""" - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -2898,10 +2902,10 @@ class TableFieldSchema(proto.Message): proto.STRING, number=3, ) - fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + fields: MutableSequence["TableFieldSchema"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='TableFieldSchema', + message="TableFieldSchema", ) @@ -3020,10 +3024,10 @@ class PolicyInfo(proto.Message): proto.STRING, number=1, ) - policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( + policies: MutableSequence["BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', + message="BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo", ) policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( @@ -3131,26 +3135,26 @@ class StringValues(proto.Message): number=2, ) - values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( + values: "AnalyzerOrgPolicy.Rule.StringValues" = proto.Field( proto.MESSAGE, number=3, - oneof='kind', - message='AnalyzerOrgPolicy.Rule.StringValues', + oneof="kind", + message="AnalyzerOrgPolicy.Rule.StringValues", ) allow_all: bool = proto.Field( proto.BOOL, number=4, - oneof='kind', + oneof="kind", ) deny_all: bool = proto.Field( proto.BOOL, number=5, - oneof='kind', + oneof="kind", ) enforce: bool = proto.Field( proto.BOOL, number=6, - oneof='kind', + oneof="kind", ) condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, @@ -3241,6 +3245,7 @@ class Constraint(proto.Message): This field is a member of `oneof`_ ``constraint_type``. """ + class ConstraintDefault(proto.Enum): r"""Specifies the default behavior in the absence of any ``Policy`` for the ``Constraint``. This must not be @@ -3298,7 +3303,6 @@ class BooleanConstraint(proto.Message): ``constraints/compute.disableSerialPortAccess``. If it is enforced on a VM instance, serial port connections will not be opened to that instance. 
- """ name: str = proto.Field( @@ -3313,22 +3317,22 @@ class BooleanConstraint(proto.Message): proto.STRING, number=3, ) - constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( + constraint_default: "AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault" = proto.Field( proto.ENUM, number=4, - enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', + enum="AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault", ) - list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( + list_constraint: "AnalyzerOrgPolicyConstraint.Constraint.ListConstraint" = proto.Field( proto.MESSAGE, number=5, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.ListConstraint", ) - boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( + boolean_constraint: "AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint" = proto.Field( proto.MESSAGE, number=6, - oneof='constraint_type', - message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', + oneof="constraint_type", + message="AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint", ) class CustomConstraint(proto.Message): @@ -3363,6 +3367,7 @@ class CustomConstraint(proto.Message): Detailed information about this custom policy constraint. """ + class MethodType(proto.Enum): r"""The operation in which this constraint will be applied. For example: If the constraint applies only when create VMs, the method_types @@ -3410,19 +3415,19 @@ class ActionType(proto.Enum): proto.STRING, number=2, ) - method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField( + method_types: MutableSequence["AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType"] = proto.RepeatedField( proto.ENUM, number=3, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType', + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType", ) condition: str = proto.Field( proto.STRING, number=4, ) - action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field( + action_type: "AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType" = proto.Field( proto.ENUM, number=5, - enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType', + enum="AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType", ) display_name: str = proto.Field( proto.STRING, @@ -3436,13 +3441,13 @@ class ActionType(proto.Enum): google_defined_constraint: Constraint = proto.Field( proto.MESSAGE, number=1, - oneof='constraint_definition', + oneof="constraint_definition", message=Constraint, ) custom_constraint: CustomConstraint = proto.Field( proto.MESSAGE, number=2, - oneof='constraint_definition', + oneof="constraint_definition", message=CustomConstraint, ) @@ -3549,15 +3554,15 @@ class OrgPolicyResult(proto.Message): also appear in the list. 
""" - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=1, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3569,10 +3574,10 @@ def raw_page(self): number=1, message=OrgPolicyResult, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3699,15 +3704,15 @@ class GovernedContainer(proto.Message): proto.STRING, number=2, ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3719,10 +3724,10 @@ def raw_page(self): number=1, message=GovernedContainer, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, @@ -3963,27 +3968,27 @@ class GovernedAsset(proto.Message): also appear in the list. 
""" - governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( + governed_resource: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource" = proto.Field( proto.MESSAGE, number=1, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource", ) - governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( + governed_iam_policy: "AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy" = proto.Field( proto.MESSAGE, number=2, - oneof='governed_asset', - message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', + oneof="governed_asset", + message="AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy", ) - consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + consolidated_policy: "AnalyzerOrgPolicy" = proto.Field( proto.MESSAGE, number=3, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) - policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + policy_bundle: MutableSequence["AnalyzerOrgPolicy"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='AnalyzerOrgPolicy', + message="AnalyzerOrgPolicy", ) @property @@ -3995,10 +4000,10 @@ def raw_page(self): number=1, message=GovernedAsset, ) - constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + constraint: "AnalyzerOrgPolicyConstraint" = proto.Field( proto.MESSAGE, number=2, - message='AnalyzerOrgPolicyConstraint', + message="AnalyzerOrgPolicyConstraint", ) next_page_token: str = proto.Field( proto.STRING, diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index d2097a07a4..99b3c9dbf0 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -30,27 +30,29 @@ from google.rpc import code_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.cloud.asset.v1', + package="google.cloud.asset.v1", manifest={ - 'TemporalAsset', - 'TimeWindow', - 'Asset', - 'Resource', - 'RelatedAssets', - 'RelationshipAttributes', - 'RelatedAsset', - 'ResourceSearchResult', - 'VersionedResource', - 'AttachedResource', - 'RelatedResources', - 'RelatedResource', - 'IamPolicySearchResult', - 'IamPolicyAnalysisState', - 'ConditionEvaluation', - 'IamPolicyAnalysisResult', + "TemporalAsset", + "TimeWindow", + "Asset", + "Resource", + "RelatedAssets", + "RelationshipAttributes", + "RelatedAsset", + "ResourceSearchResult", + "VersionedResource", + "AttachedResource", + "RelatedResources", + "RelatedResource", + "IamPolicySearchResult", + "IamPolicyAnalysisState", + "ConditionEvaluation", + "IamPolicyAnalysisResult", }, ) +# fmt: on class TemporalAsset(proto.Message): @@ -73,6 +75,7 @@ class TemporalAsset(proto.Message): PRESENT. Currently this is only set for responses in Real-Time Feed. """ + class PriorAssetState(proto.Enum): r"""State of prior asset. 
@@ -94,29 +97,29 @@ class PriorAssetState(proto.Enum): DOES_NOT_EXIST = 3 DELETED = 4 - window: 'TimeWindow' = proto.Field( + window: "TimeWindow" = proto.Field( proto.MESSAGE, number=1, - message='TimeWindow', + message="TimeWindow", ) deleted: bool = proto.Field( proto.BOOL, number=2, ) - asset: 'Asset' = proto.Field( + asset: "Asset" = proto.Field( proto.MESSAGE, number=3, - message='Asset', + message="Asset", ) prior_asset_state: PriorAssetState = proto.Field( proto.ENUM, number=4, enum=PriorAssetState, ) - prior_asset: 'Asset' = proto.Field( + prior_asset: "Asset" = proto.Field( proto.MESSAGE, number=5, - message='Asset', + message="Asset", ) @@ -253,10 +256,10 @@ class Asset(proto.Message): proto.STRING, number=2, ) - resource: 'Resource' = proto.Field( + resource: "Resource" = proto.Field( proto.MESSAGE, number=3, - message='Resource', + message="Resource", ) iam_policy: policy_pb2.Policy = proto.Field( proto.MESSAGE, @@ -271,19 +274,19 @@ class Asset(proto.Message): access_policy: access_policy_pb2.AccessPolicy = proto.Field( proto.MESSAGE, number=7, - oneof='access_context_policy', + oneof="access_context_policy", message=access_policy_pb2.AccessPolicy, ) access_level: access_level_pb2.AccessLevel = proto.Field( proto.MESSAGE, number=8, - oneof='access_context_policy', + oneof="access_context_policy", message=access_level_pb2.AccessLevel, ) service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( proto.MESSAGE, number=9, - oneof='access_context_policy', + oneof="access_context_policy", message=service_perimeter_pb2.ServicePerimeter, ) os_inventory: inventory_pb2.Inventory = proto.Field( @@ -291,15 +294,15 @@ class Asset(proto.Message): number=12, message=inventory_pb2.Inventory, ) - related_assets: 'RelatedAssets' = proto.Field( + related_assets: "RelatedAssets" = proto.Field( proto.MESSAGE, number=13, - message='RelatedAssets', + message="RelatedAssets", ) - related_asset: 'RelatedAsset' = proto.Field( + related_asset: "RelatedAsset" = proto.Field( proto.MESSAGE, number=15, - message='RelatedAsset', + message="RelatedAsset", ) ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -400,15 +403,15 @@ class RelatedAssets(proto.Message): The peer resources of the relationship. 
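`proto.Enum` subclasses `enum.IntEnum`, so members such as `PriorAssetState.DELETED = 4` above compare equal to their wire numbers. A quick illustration with a hypothetical enum:

import proto

class State(proto.Enum):
    STATE_UNSPECIFIED = 0
    ACTIVE = 1
    DELETED = 2

assert State.DELETED == 2           # IntEnum comparison against the wire number
assert State(1) is State.ACTIVE     # lookup by wire number yields the member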
""" - relationship_attributes: 'RelationshipAttributes' = proto.Field( + relationship_attributes: "RelationshipAttributes" = proto.Field( proto.MESSAGE, number=1, - message='RelationshipAttributes', + message="RelationshipAttributes", ) - assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( + assets: MutableSequence["RelatedAsset"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='RelatedAsset', + message="RelatedAsset", ) @@ -888,21 +891,21 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=19, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='VersionedResource', + message="VersionedResource", ) - attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( + attached_resources: MutableSequence["AttachedResource"] = proto.RepeatedField( proto.MESSAGE, number=20, - message='AttachedResource', + message="AttachedResource", ) - relationships: MutableMapping[str, 'RelatedResources'] = proto.MapField( + relationships: MutableMapping[str, "RelatedResources"] = proto.MapField( proto.STRING, proto.MESSAGE, number=21, - message='RelatedResources', + message="RelatedResources", ) tag_keys: MutableSequence[str] = proto.RepeatedField( proto.STRING, @@ -985,10 +988,10 @@ class AttachedResource(proto.Message): proto.STRING, number=1, ) - versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + versioned_resources: MutableSequence["VersionedResource"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='VersionedResource', + message="VersionedResource", ) @@ -1001,10 +1004,10 @@ class RelatedResources(proto.Message): resource. """ - related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + related_resources: MutableSequence["RelatedResource"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='RelatedResource', + message="RelatedResource", ) @@ -1142,11 +1145,11 @@ class Permissions(proto.Message): number=1, ) - matched_permissions: MutableMapping[str, 'IamPolicySearchResult.Explanation.Permissions'] = proto.MapField( + matched_permissions: MutableMapping[str, "IamPolicySearchResult.Explanation.Permissions"] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, - message='IamPolicySearchResult.Explanation.Permissions', + message="IamPolicySearchResult.Explanation.Permissions", ) resource: str = proto.Field( @@ -1219,6 +1222,7 @@ class ConditionEvaluation(proto.Message): evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): The evaluation result. """ + class EvaluationValue(proto.Enum): r"""Value of this expression. 
@@ -1290,10 +1294,10 @@ class Resource(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Access(proto.Message): @@ -1322,17 +1326,17 @@ class Access(proto.Message): role: str = proto.Field( proto.STRING, number=1, - oneof='oneof_access', + oneof="oneof_access", ) permission: str = proto.Field( proto.STRING, number=2, - oneof='oneof_access', + oneof="oneof_access", ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=3, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Identity(proto.Message): @@ -1360,10 +1364,10 @@ class Identity(proto.Message): proto.STRING, number=1, ) - analysis_state: 'IamPolicyAnalysisState' = proto.Field( + analysis_state: "IamPolicyAnalysisState" = proto.Field( proto.MESSAGE, number=2, - message='IamPolicyAnalysisState', + message="IamPolicyAnalysisState", ) class Edge(proto.Message): @@ -1437,25 +1441,25 @@ class AccessControlList(proto.Message): defined in the above IAM policy binding. """ - resources: MutableSequence['IamPolicyAnalysisResult.Resource'] = proto.RepeatedField( + resources: MutableSequence["IamPolicyAnalysisResult.Resource"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='IamPolicyAnalysisResult.Resource', + message="IamPolicyAnalysisResult.Resource", ) - accesses: MutableSequence['IamPolicyAnalysisResult.Access'] = proto.RepeatedField( + accesses: MutableSequence["IamPolicyAnalysisResult.Access"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='IamPolicyAnalysisResult.Access', + message="IamPolicyAnalysisResult.Access", ) - resource_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( + resource_edges: MutableSequence["IamPolicyAnalysisResult.Edge"] = proto.RepeatedField( proto.MESSAGE, number=3, - message='IamPolicyAnalysisResult.Edge', + message="IamPolicyAnalysisResult.Edge", ) - condition_evaluation: 'ConditionEvaluation' = proto.Field( + condition_evaluation: "ConditionEvaluation" = proto.Field( proto.MESSAGE, number=4, - message='ConditionEvaluation', + message="ConditionEvaluation", ) class IdentityList(proto.Message): @@ -1483,15 +1487,15 @@ class IdentityList(proto.Message): enabled in request. 
""" - identities: MutableSequence['IamPolicyAnalysisResult.Identity'] = proto.RepeatedField( + identities: MutableSequence["IamPolicyAnalysisResult.Identity"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='IamPolicyAnalysisResult.Identity', + message="IamPolicyAnalysisResult.Identity", ) - group_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( + group_edges: MutableSequence["IamPolicyAnalysisResult.Edge"] = proto.RepeatedField( proto.MESSAGE, number=2, - message='IamPolicyAnalysisResult.Edge', + message="IamPolicyAnalysisResult.Edge", ) attached_resource_full_name: str = proto.Field( diff --git a/tests/integration/goldens/asset/noxfile.py b/tests/integration/goldens/asset/noxfile.py index 39fc961114..0ed6d5ce76 100755 --- a/tests/integration/goldens/asset/noxfile.py +++ b/tests/integration/goldens/asset/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index bd17439996..27018cf25a 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1009,7 +1009,7 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1021,7 +1021,7 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest( - parent='parent_value', + parent="parent_value", ) def test_export_assets_use_cached_wrapped_rpc(): @@ -1144,7 +1144,7 @@ def test_export_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1176,7 +1176,7 @@ async def test_export_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1218,7 +1218,7 @@ def test_list_assets(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_assets(request) @@ -1230,7 +1230,7 @@ def test_list_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_assets_non_empty_request_with_auto_populated_field(): @@ -1245,8 +1245,8 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1258,8 +1258,8 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_assets_use_cached_wrapped_rpc(): @@ -1345,7 +1345,7 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_assets(request) @@ -1357,7 +1357,7 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1373,7 +1373,7 @@ def test_list_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1405,7 +1405,7 @@ async def test_list_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1441,7 +1441,7 @@ def test_list_assets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1449,7 +1449,7 @@ def test_list_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1463,7 +1463,7 @@ def test_list_assets_flattened_error(): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1483,7 +1483,7 @@ async def test_list_assets_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_assets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1491,7 +1491,7 @@ async def test_list_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1505,7 +1505,7 @@ async def test_list_assets_flattened_error_async(): with pytest.raises(ValueError): await client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1750,7 +1750,7 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1762,7 +1762,7 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest( - parent='parent_value', + parent="parent_value", ) def test_batch_get_assets_history_use_cached_wrapped_rpc(): @@ -1874,7 +1874,7 @@ def test_batch_get_assets_history_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1906,7 +1906,7 @@ async def test_batch_get_assets_history_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1948,11 +1948,11 @@ def test_create_feed(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.create_feed(request) @@ -1964,11 +1964,11 @@ def test_create_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_create_feed_non_empty_request_with_auto_populated_field(): @@ -1983,8 +1983,8 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1996,8 +1996,8 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest( - parent='parent_value', - feed_id='feed_id_value', + parent="parent_value", + feed_id="feed_id_value", ) def test_create_feed_use_cached_wrapped_rpc(): @@ -2083,11 +2083,11 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) response = await client.create_feed(request) @@ -2099,11 +2099,11 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio @@ -2119,7 +2119,7 @@ def test_create_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2151,7 +2151,7 @@ async def test_create_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2187,7 +2187,7 @@ def test_create_feed_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2195,7 +2195,7 @@ def test_create_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2209,7 +2209,7 @@ def test_create_feed_flattened_error(): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -2229,7 +2229,7 @@ async def test_create_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_feed( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2237,7 +2237,7 @@ async def test_create_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -2251,7 +2251,7 @@ async def test_create_feed_flattened_error_async(): with pytest.raises(ValueError): await client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) @@ -2275,11 +2275,11 @@ def test_get_feed(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.get_feed(request) @@ -2291,11 +2291,11 @@ def test_get_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_get_feed_non_empty_request_with_auto_populated_field(): @@ -2310,7 +2310,7 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2322,7 +2322,7 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest( - name='name_value', + name="name_value", ) def test_get_feed_use_cached_wrapped_rpc(): @@ -2408,11 +2408,11 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) response = await client.get_feed(request) @@ -2424,11 +2424,11 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio @@ -2444,7 +2444,7 @@ def test_get_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2476,7 +2476,7 @@ async def test_get_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2512,7 +2512,7 @@ def test_get_feed_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2520,7 +2520,7 @@ def test_get_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2534,7 +2534,7 @@ def test_get_feed_flattened_error(): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -2554,7 +2554,7 @@ async def test_get_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.get_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2562,7 +2562,7 @@ async def test_get_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -2576,7 +2576,7 @@ async def test_get_feed_flattened_error_async(): with pytest.raises(ValueError): await client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) @@ -2625,7 +2625,7 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2637,7 +2637,7 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest( - parent='parent_value', + parent="parent_value", ) def test_list_feeds_use_cached_wrapped_rpc(): @@ -2749,7 +2749,7 @@ def test_list_feeds_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2781,7 +2781,7 @@ async def test_list_feeds_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2817,7 +2817,7 @@ def test_list_feeds_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2825,7 +2825,7 @@ def test_list_feeds_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2839,7 +2839,7 @@ def test_list_feeds_flattened_error(): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -2859,7 +2859,7 @@ async def test_list_feeds_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_feeds( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2867,7 +2867,7 @@ async def test_list_feeds_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -2881,7 +2881,7 @@ async def test_list_feeds_flattened_error_async(): with pytest.raises(ValueError): await client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -2905,11 +2905,11 @@ def test_update_feed(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) response = client.update_feed(request) @@ -2921,11 +2921,11 @@ def test_update_feed(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] def test_update_feed_non_empty_request_with_auto_populated_field(): @@ -3036,11 +3036,11 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) response = await client.update_feed(request) @@ -3052,11 +3052,11 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.asyncio @@ -3072,7 +3072,7 @@ def test_update_feed_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3104,7 +3104,7 @@ async def test_update_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'name_value' + request.feed.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3140,7 +3140,7 @@ def test_update_feed_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3148,7 +3148,7 @@ def test_update_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val @@ -3162,7 +3162,7 @@ def test_update_feed_flattened_error(): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) @pytest.mark.asyncio @@ -3182,7 +3182,7 @@ async def test_update_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_feed( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3190,7 +3190,7 @@ async def test_update_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].feed - mock_val = asset_service.Feed(name='name_value') + mock_val = asset_service.Feed(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -3204,7 +3204,7 @@ async def test_update_feed_flattened_error_async(): with pytest.raises(ValueError): await client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) @@ -3252,7 +3252,7 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3264,7 +3264,7 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest( - name='name_value', + name="name_value", ) def test_delete_feed_use_cached_wrapped_rpc(): @@ -3375,7 +3375,7 @@ def test_delete_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3407,7 +3407,7 @@ async def test_delete_feed_field_headers_async(): # a field header. Set these to a non-empty value. 
request = asset_service.DeleteFeedRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3443,7 +3443,7 @@ def test_delete_feed_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3451,7 +3451,7 @@ def test_delete_feed_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -3465,7 +3465,7 @@ def test_delete_feed_flattened_error(): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -3485,7 +3485,7 @@ async def test_delete_feed_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_feed( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3493,7 +3493,7 @@ async def test_delete_feed_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -3507,7 +3507,7 @@ async def test_delete_feed_flattened_error_async(): with pytest.raises(ValueError): await client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) @@ -3531,7 +3531,7 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_resources(request) @@ -3543,7 +3543,7 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_resources_non_empty_request_with_auto_populated_field(): @@ -3558,10 +3558,10 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3573,10 +3573,10 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) def test_search_all_resources_use_cached_wrapped_rpc(): @@ -3662,7 +3662,7 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.search_all_resources(request) @@ -3674,7 +3674,7 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -3690,7 +3690,7 @@ def test_search_all_resources_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3722,7 +3722,7 @@ async def test_search_all_resources_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3758,9 +3758,9 @@ def test_search_all_resources_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3768,13 +3768,13 @@ def test_search_all_resources_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val @@ -3788,9 +3788,9 @@ def test_search_all_resources_flattened_error(): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) @pytest.mark.asyncio @@ -3810,9 +3810,9 @@ async def test_search_all_resources_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.search_all_resources( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) # Establish that the underlying call was made with the expected @@ -3820,13 +3820,13 @@ async def test_search_all_resources_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val arg = args[0].asset_types - mock_val = ['asset_types_value'] + mock_val = ["asset_types_value"] assert arg == mock_val @pytest.mark.asyncio @@ -3840,9 +3840,9 @@ async def test_search_all_resources_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) @@ -4062,7 +4062,7 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.search_all_iam_policies(request) @@ -4074,7 +4074,7 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): @@ -4089,10 +4089,10 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4104,10 +4104,10 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest( - scope='scope_value', - query='query_value', - page_token='page_token_value', - order_by='order_by_value', + scope="scope_value", + query="query_value", + page_token="page_token_value", + order_by="order_by_value", ) def test_search_all_iam_policies_use_cached_wrapped_rpc(): @@ -4193,7 +4193,7 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.search_all_iam_policies(request) @@ -4205,7 +4205,7 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -4221,7 +4221,7 @@ def test_search_all_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4253,7 +4253,7 @@ async def test_search_all_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4289,8 +4289,8 @@ def test_search_all_iam_policies_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4298,10 +4298,10 @@ def test_search_all_iam_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val @@ -4315,8 +4315,8 @@ def test_search_all_iam_policies_flattened_error(): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) @pytest.mark.asyncio @@ -4336,8 +4336,8 @@ async def test_search_all_iam_policies_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.search_all_iam_policies( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) # Establish that the underlying call was made with the expected @@ -4345,10 +4345,10 @@ async def test_search_all_iam_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].query - mock_val = 'query_value' + mock_val = "query_value" assert arg == mock_val @pytest.mark.asyncio @@ -4362,8 +4362,8 @@ async def test_search_all_iam_policies_flattened_error_async(): with pytest.raises(ValueError): await client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) @@ -4610,7 +4610,7 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4622,7 +4622,7 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) def test_analyze_iam_policy_use_cached_wrapped_rpc(): @@ -4736,7 +4736,7 @@ def test_analyze_iam_policy_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4768,7 +4768,7 @@ async def test_analyze_iam_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4834,7 +4834,7 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4846,7 +4846,7 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest( - saved_analysis_query='saved_analysis_query_value', + saved_analysis_query="saved_analysis_query_value", ) def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): @@ -4969,7 +4969,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5001,7 +5001,7 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'scope_value' + request.analysis_query.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5068,8 +5068,8 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5081,8 +5081,8 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest( - resource='resource_value', - destination_parent='destination_parent_value', + resource="resource_value", + destination_parent="destination_parent_value", ) def test_analyze_move_use_cached_wrapped_rpc(): @@ -5194,7 +5194,7 @@ def test_analyze_move_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5226,7 +5226,7 @@ async def test_analyze_move_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeMoveRequest() - request.resource = 'resource_value' + request.resource = "resource_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5268,7 +5268,7 @@ def test_query_assets(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, ) response = client.query_assets(request) @@ -5281,7 +5281,7 @@ def test_query_assets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5297,10 +5297,10 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5312,10 +5312,10 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest( - parent='parent_value', - statement='statement_value', - job_reference='job_reference_value', - page_token='page_token_value', + parent="parent_value", + statement="statement_value", + job_reference="job_reference_value", + page_token="page_token_value", ) def test_query_assets_use_cached_wrapped_rpc(): @@ -5401,7 +5401,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, )) response = await client.query_assets(request) @@ -5414,7 +5414,7 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -5431,7 +5431,7 @@ def test_query_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5463,7 +5463,7 @@ async def test_query_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.QueryAssetsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5505,10 +5505,10 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.create_saved_query(request) @@ -5520,10 +5520,10 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_create_saved_query_non_empty_request_with_auto_populated_field(): @@ -5538,8 +5538,8 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5551,8 +5551,8 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest( - parent='parent_value', - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query_id="saved_query_id_value", ) def test_create_saved_query_use_cached_wrapped_rpc(): @@ -5638,10 +5638,10 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) response = await client.create_saved_query(request) @@ -5653,10 +5653,10 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio @@ -5672,7 +5672,7 @@ def test_create_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5704,7 +5704,7 @@ async def test_create_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateSavedQueryRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5740,9 +5740,9 @@ def test_create_saved_query_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5750,13 +5750,13 @@ def test_create_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val @@ -5770,9 +5770,9 @@ def test_create_saved_query_flattened_error(): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) @pytest.mark.asyncio @@ -5792,9 +5792,9 @@ async def test_create_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_saved_query( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) # Establish that the underlying call was made with the expected @@ -5802,13 +5802,13 @@ async def test_create_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].saved_query_id - mock_val = 'saved_query_id_value' + mock_val = "saved_query_id_value" assert arg == mock_val @pytest.mark.asyncio @@ -5822,9 +5822,9 @@ async def test_create_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) @@ -5848,10 +5848,10 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.get_saved_query(request) @@ -5863,10 +5863,10 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_get_saved_query_non_empty_request_with_auto_populated_field(): @@ -5881,7 +5881,7 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5893,7 +5893,7 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest( - name='name_value', + name="name_value", ) def test_get_saved_query_use_cached_wrapped_rpc(): @@ -5979,10 +5979,10 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) response = await client.get_saved_query(request) @@ -5994,10 +5994,10 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio @@ -6013,7 +6013,7 @@ def test_get_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6045,7 +6045,7 @@ async def test_get_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6081,7 +6081,7 @@ def test_get_saved_query_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6089,7 +6089,7 @@ def test_get_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -6103,7 +6103,7 @@ def test_get_saved_query_flattened_error(): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -6123,7 +6123,7 @@ async def test_get_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6131,7 +6131,7 @@ async def test_get_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -6145,7 +6145,7 @@ async def test_get_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -6169,7 +6169,7 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_saved_queries(request) @@ -6181,7 +6181,7 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_saved_queries_non_empty_request_with_auto_populated_field(): @@ -6196,9 +6196,9 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6210,9 +6210,9 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListSavedQueriesRequest( - parent='parent_value', - filter='filter_value', - page_token='page_token_value', + parent="parent_value", + filter="filter_value", + page_token="page_token_value", ) def test_list_saved_queries_use_cached_wrapped_rpc(): @@ -6298,7 +6298,7 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_saved_queries(request) @@ -6310,7 +6310,7 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -6326,7 +6326,7 @@ def test_list_saved_queries_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6358,7 +6358,7 @@ async def test_list_saved_queries_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListSavedQueriesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6394,7 +6394,7 @@ def test_list_saved_queries_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6402,7 +6402,7 @@ def test_list_saved_queries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -6416,7 +6416,7 @@ def test_list_saved_queries_flattened_error(): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -6436,7 +6436,7 @@ async def test_list_saved_queries_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_saved_queries( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -6444,7 +6444,7 @@ async def test_list_saved_queries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -6458,7 +6458,7 @@ async def test_list_saved_queries_flattened_error_async(): with pytest.raises(ValueError): await client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -6678,10 +6678,10 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) response = client.update_saved_query(request) @@ -6693,10 +6693,10 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" def test_update_saved_query_non_empty_request_with_auto_populated_field(): @@ -6807,10 +6807,10 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) response = await client.update_saved_query(request) @@ -6822,10 +6822,10 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.asyncio @@ -6841,7 +6841,7 @@ def test_update_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6873,7 +6873,7 @@ async def test_update_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateSavedQueryRequest() - request.saved_query.name = 'name_value' + request.saved_query.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6909,8 +6909,8 @@ def test_update_saved_query_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6918,10 +6918,10 @@ def test_update_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6935,8 +6935,8 @@ def test_update_saved_query_flattened_error(): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio @@ -6956,8 +6956,8 @@ async def test_update_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_saved_query( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6965,10 +6965,10 @@ async def test_update_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].saved_query - mock_val = asset_service.SavedQuery(name='name_value') + mock_val = asset_service.SavedQuery(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio @@ -6982,8 +6982,8 @@ async def test_update_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7031,7 +7031,7 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7043,7 +7043,7 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest( - name='name_value', + name="name_value", ) def test_delete_saved_query_use_cached_wrapped_rpc(): @@ -7154,7 +7154,7 @@ def test_delete_saved_query_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7186,7 +7186,7 @@ async def test_delete_saved_query_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteSavedQueryRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7222,7 +7222,7 @@ def test_delete_saved_query_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7230,7 +7230,7 @@ def test_delete_saved_query_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7244,7 +7244,7 @@ def test_delete_saved_query_flattened_error(): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -7264,7 +7264,7 @@ async def test_delete_saved_query_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_saved_query( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7272,7 +7272,7 @@ async def test_delete_saved_query_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -7286,7 +7286,7 @@ async def test_delete_saved_query_flattened_error_async(): with pytest.raises(ValueError): await client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -7335,7 +7335,7 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7347,7 +7347,7 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest( - scope='scope_value', + scope="scope_value", ) def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): @@ -7459,7 +7459,7 @@ def test_batch_get_effective_iam_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7491,7 +7491,7 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetEffectiveIamPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7533,7 +7533,7 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policies(request) @@ -7545,7 +7545,7 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): @@ -7560,10 +7560,10 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
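The *_field_headers tests that recur throughout these hunks pin the request-routing contract: whatever value is set on the routed field (name, parent, scope, resource) must be mirrored into the x-goog-request-params gRPC metadata entry. A condensed, standalone restatement of that pattern, assuming the same public surface the goldens import (the test name here is illustrative, not generator output):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.asset_v1.services.asset_service import AssetServiceClient
    from google.cloud.asset_v1.types import asset_service


    def test_field_headers_sketch():
        # Anonymous credentials keep the test hermetic; the transport call is
        # mocked below, so nothing goes over the wire.
        client = AssetServiceClient(credentials=ga_credentials.AnonymousCredentials())

        request = asset_service.GetSavedQueryRequest()
        request.name = "name_value"

        with mock.patch.object(
                type(client.transport.get_saved_query), "__call__") as call:
            call.return_value = asset_service.SavedQuery()
            client.get_saved_query(request)

        # The routed field must be echoed as gRPC metadata.
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name_value") in kw["metadata"]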
@@ -7575,10 +7575,10 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) def test_analyze_org_policies_use_cached_wrapped_rpc(): @@ -7664,7 +7664,7 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.analyze_org_policies(request) @@ -7676,7 +7676,7 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -7692,7 +7692,7 @@ def test_analyze_org_policies_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7724,7 +7724,7 @@ async def test_analyze_org_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPoliciesRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7760,9 +7760,9 @@ def test_analyze_org_policies_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7770,13 +7770,13 @@ def test_analyze_org_policies_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -7790,9 +7790,9 @@ def test_analyze_org_policies_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @pytest.mark.asyncio @@ -7812,9 +7812,9 @@ async def test_analyze_org_policies_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.analyze_org_policies( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -7822,13 +7822,13 @@ async def test_analyze_org_policies_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @pytest.mark.asyncio @@ -7842,9 +7842,9 @@ async def test_analyze_org_policies_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8064,7 +8064,7 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_containers(request) @@ -8076,7 +8076,7 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): @@ -8091,10 +8091,10 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8106,10 +8106,10 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): @@ -8195,7 +8195,7 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.analyze_org_policy_governed_containers(request) @@ -8207,7 +8207,7 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -8223,7 +8223,7 @@ def test_analyze_org_policy_governed_containers_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8255,7 +8255,7 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8291,9 +8291,9 @@ def test_analyze_org_policy_governed_containers_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8301,13 +8301,13 @@ def test_analyze_org_policy_governed_containers_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8321,9 +8321,9 @@ def test_analyze_org_policy_governed_containers_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @pytest.mark.asyncio @@ -8343,9 +8343,9 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.analyze_org_policy_governed_containers( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8353,13 +8353,13 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @pytest.mark.asyncio @@ -8373,9 +8373,9 @@ async def test_analyze_org_policy_governed_containers_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -8595,7 +8595,7 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc '__call__') as call: # Designate an appropriate return value for the call. call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.analyze_org_policy_governed_assets(request) @@ -8607,7 +8607,7 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): @@ -8622,10 +8622,10 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8637,10 +8637,10 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', - page_token='page_token_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", + page_token="page_token_value", ) def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): @@ -8726,7 +8726,7 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.analyze_org_policy_governed_assets(request) @@ -8738,7 +8738,7 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -8754,7 +8754,7 @@ def test_analyze_org_policy_governed_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8786,7 +8786,7 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - request.scope = 'scope_value' + request.scope = "scope_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8822,9 +8822,9 @@ def test_analyze_org_policy_governed_assets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8832,13 +8832,13 @@ def test_analyze_org_policy_governed_assets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @@ -8852,9 +8852,9 @@ def test_analyze_org_policy_governed_assets_flattened_error(): with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @pytest.mark.asyncio @@ -8874,9 +8874,9 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.analyze_org_policy_governed_assets( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) # Establish that the underlying call was made with the expected @@ -8884,13 +8884,13 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].scope - mock_val = 'scope_value' + mock_val = "scope_value" assert arg == mock_val arg = args[0].constraint - mock_val = 'constraint_value' + mock_val = "constraint_value" assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val @pytest.mark.asyncio @@ -8904,9 +8904,9 @@ async def test_analyze_org_policy_governed_assets_flattened_error_async(): with pytest.raises(ValueError): await client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -9164,14 +9164,14 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9274,7 +9274,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
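The grpc_asyncio variants above wrap each canned response in grpc_helpers_async.FakeUnaryUnaryCall (from google.api_core, as used in these goldens) so that the mocked stub result can be awaited like a real grpc.aio unary-unary call. A stripped-down stand-in that shows the mechanism, not the real implementation:

    class FakeUnaryUnaryCallSketch:
        """Awaitable stand-in for a grpc.aio unary-unary call (sketch only;
        the goldens use google.api_core.grpc_helpers_async.FakeUnaryUnaryCall)."""

        def __init__(self, response=None):
            self._response = response

        async def _resolve(self):
            return self._response

        def __await__(self):
            # Awaiting the "call" resolves immediately to the canned response.
            return self._resolve().__await__()

With a stand-in like this as call.return_value, response = await client.get_saved_query(request) yields the canned message, which is how the async tests above designate their return values.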
@@ -9283,7 +9283,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9351,7 +9351,7 @@ def test_list_assets_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9385,7 +9385,7 @@ def test_list_assets_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_assets( asset_service.ListAssetsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9504,7 +9504,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -9513,7 +9513,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9619,17 +9619,17 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["feedId"] = "feed_id_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' + assert jsonified_request["feedId"] == "feed_id_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9698,7 +9698,7 @@ def test_create_feed_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9732,7 +9732,7 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_feed( asset_service.CreateFeedRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9789,14 +9789,14 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify 
required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9864,7 +9864,7 @@ def test_get_feed_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9898,7 +9898,7 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_feed( asset_service.GetFeedRequest(), - name='name_value', + name="name_value", ) @@ -9955,14 +9955,14 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10030,7 +10030,7 @@ def test_list_feeds_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -10064,7 +10064,7 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_feeds( asset_service.ListFeedsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -10192,7 +10192,7 @@ def test_update_feed_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) mock_args.update(sample_request) @@ -10226,7 +10226,7 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_feed( asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + feed=asset_service.Feed(name="name_value"), ) @@ -10283,14 +10283,14 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10355,7 +10355,7 @@ def test_delete_feed_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10387,7 +10387,7 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_feed( asset_service.DeleteFeedRequest(), - name='name_value', + name="name_value", ) @@ -10444,7 +10444,7 @@ def 
test_search_all_resources_rest_required_fields(request_type=asset_service.Se # verify required fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -10453,7 +10453,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10521,9 +10521,9 @@ def test_search_all_resources_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) mock_args.update(sample_request) @@ -10557,9 +10557,9 @@ def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_resources( asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], + scope="scope_value", + query="query_value", + asset_types=["asset_types_value"], ) @@ -10678,7 +10678,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service # verify required fields with default values are now present - jsonified_request["scope"] = 'scope_value' + jsonified_request["scope"] = "scope_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
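The *_rest_required_fields tests in this stretch all follow one script: jsonify the request, pop the required keys (note the lowerCamelCase spellings such as feedId and savedQueryId, reflecting the JSON form used for HTTP transcoding), ask the transport which required fields are still unset, fill defaults, then set real values and confirm they are left alone. A hypothetical condensation of the helper they exercise, with the required-field map inlined for a single method:

    # Assumed shape of the generated helper; the real transport keeps a
    # per-method map of required fields and their default values.
    REQUIRED_FIELDS_DEFAULT_VALUES = {"parent": ""}

    def get_unset_required_fields(message_dict):
        # Report required keys missing from the camelCase JSON request form.
        return {
            k: v
            for k, v in REQUIRED_FIELDS_DEFAULT_VALUES.items()
            if k not in message_dict
        }

    jsonified_request = {}
    jsonified_request.update(get_unset_required_fields(jsonified_request))
    assert jsonified_request["parent"] == ""  # default now present

    jsonified_request["parent"] = "parent_value"
    assert get_unset_required_fields(jsonified_request) == {}  # left alone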
@@ -10687,7 +10687,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10755,8 +10755,8 @@ def test_search_all_iam_policies_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) mock_args.update(sample_request) @@ -10790,8 +10790,8 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.search_all_iam_policies( asset_service.SearchAllIamPoliciesRequest(), - scope='scope_value', - query='query_value', + scope="scope_value", + query="query_value", ) @@ -11132,8 +11132,8 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov assert "destinationParent" in jsonified_request assert jsonified_request["destinationParent"] == request_init["destination_parent"] - jsonified_request["resource"] = 'resource_value' - jsonified_request["destinationParent"] = 'destination_parent_value' + jsonified_request["resource"] = "resource_value" + jsonified_request["destinationParent"] = "destination_parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -11142,9 +11142,9 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov # verify required fields with non-default values are left alone assert "resource" in jsonified_request - assert jsonified_request["resource"] == 'resource_value' + assert jsonified_request["resource"] == "resource_value" assert "destinationParent" in jsonified_request - assert jsonified_request["destinationParent"] == 'destination_parent_value' + assert jsonified_request["destinationParent"] == "destination_parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11253,14 +11253,14 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11370,8 +11370,8 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea assert "savedQueryId" in jsonified_request assert jsonified_request["savedQueryId"] == request_init["saved_query_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["savedQueryId"] = 'saved_query_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["savedQueryId"] = "saved_query_id_value" unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -11380,9 +11380,9 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "savedQueryId" in jsonified_request - assert jsonified_request["savedQueryId"] == 'saved_query_id_value' + assert jsonified_request["savedQueryId"] == "saved_query_id_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11455,9 +11455,9 @@ def test_create_saved_query_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) mock_args.update(sample_request) @@ -11491,9 +11491,9 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_saved_query( asset_service.CreateSavedQueryRequest(), - parent='parent_value', - saved_query=asset_service.SavedQuery(name='name_value'), - saved_query_id='saved_query_id_value', + parent="parent_value", + saved_query=asset_service.SavedQuery(name="name_value"), + saved_query_id="saved_query_id_value", ) @@ -11550,14 +11550,14 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11625,7 +11625,7 @@ def test_get_saved_query_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -11659,7 +11659,7 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_saved_query( asset_service.GetSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -11716,7 +11716,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
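The *_flattened_error tests here (create_saved_query, get_saved_query, and their neighbors) pin a single GAPIC invariant: a populated request message and flattened keyword arguments are mutually exclusive. Roughly, the guard each generated method applies before building the request, written as a hypothetical free function:

    def guard_flattened(request, *flattened_args):
        # Mirrors the generated check: truthy flattened kwargs conflict with
        # an explicit request object. Wording follows the GAPIC error text.
        if request is not None and any(flattened_args):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

Passing both, as the error tests do (a request object plus parent="parent_value" and friends), trips this guard; passing only flattened values lets the client copy them onto a fresh request, which the matching *_flattened tests then assert field by field.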
@@ -11725,7 +11725,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11793,7 +11793,7 @@ def test_list_saved_queries_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -11827,7 +11827,7 @@ def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_saved_queries( asset_service.ListSavedQueriesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -12019,8 +12019,8 @@ def test_update_saved_query_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -12054,8 +12054,8 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_saved_query( asset_service.UpdateSavedQueryRequest(), - saved_query=asset_service.SavedQuery(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + saved_query=asset_service.SavedQuery(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -12112,14 +12112,14 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12184,7 +12184,7 @@ def test_delete_saved_query_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -12216,7 +12216,7 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_saved_query( asset_service.DeleteSavedQueryRequest(), - name='name_value', + name="name_value", ) @@ -12277,8 +12277,8 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse assert "names" in jsonified_request assert jsonified_request["names"] == request_init["names"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["names"] = 'names_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["names"] = "names_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
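[Editor's aside: the test_update_saved_query hunks above construct a protobuf FieldMask for the update_mask argument. A short self-contained sketch of that API; "description" is an illustrative path, not taken from the patch:]

from google.protobuf import field_mask_pb2

# A FieldMask names the fields an update RPC should touch.
mask = field_mask_pb2.FieldMask(paths=["description"])
assert list(mask.paths) == ["description"]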
@@ -12287,9 +12287,9 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "names" in jsonified_request - assert jsonified_request["names"] == 'names_value' + assert jsonified_request["names"] == "names_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12402,8 +12402,8 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -12412,9 +12412,9 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12486,9 +12486,9 @@ def test_analyze_org_policies_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12522,9 +12522,9 @@ def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.analyze_org_policies( asset_service.AnalyzeOrgPoliciesRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -12647,8 +12647,8 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
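[Editor's aside: the *_rest_flattened_error hunks above all assert that passing a request object together with flattened fields raises ValueError. A toy stand-in for that guard, runnable under pytest; the function and field names are illustrative, only the error text comes from the generated clients:]

import pytest

def flattened_call(request=None, *, scope=None):
    # Mirrors the mutual-exclusion guard the generated clients enforce.
    if request is not None and scope is not None:
        raise ValueError("If the `request` argument is set, then none of "
                         "the individual field arguments should be set.")

def test_request_and_flattened_args_are_mutually_exclusive():
    with pytest.raises(ValueError):
        flattened_call(request=object(), scope="scope_value")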
@@ -12657,9 +12657,9 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12731,9 +12731,9 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -12767,9 +12767,9 @@ def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: with pytest.raises(ValueError): client.analyze_org_policy_governed_containers( asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -12892,8 +12892,8 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as assert "constraint" in jsonified_request assert jsonified_request["constraint"] == request_init["constraint"] - jsonified_request["scope"] = 'scope_value' - jsonified_request["constraint"] = 'constraint_value' + jsonified_request["scope"] = "scope_value" + jsonified_request["constraint"] = "constraint_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -12902,9 +12902,9 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as # verify required fields with non-default values are left alone assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" assert "constraint" in jsonified_request - assert jsonified_request["constraint"] == 'constraint_value' + assert jsonified_request["constraint"] == "constraint_value" client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12976,9 +12976,9 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) mock_args.update(sample_request) @@ -13012,9 +13012,9 @@ def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str with pytest.raises(ValueError): client.analyze_org_policy_governed_assets( asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), - scope='scope_value', - constraint='constraint_value', - filter='filter_value', + scope="scope_value", + constraint="constraint_value", + filter="filter_value", ) @@ -13768,7 +13768,7 @@ async def test_list_assets_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_assets(request=None) @@ -13821,11 +13821,11 @@ async def test_create_feed_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) await client.create_feed(request=None) @@ -13852,11 +13852,11 @@ async def test_get_feed_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) await client.get_feed(request=None) @@ -13909,11 +13909,11 @@ async def test_update_feed_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], )) await client.update_feed(request=None) @@ -13965,7 +13965,7 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.search_all_resources(request=None) @@ -13992,7 +13992,7 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.search_all_iam_policies(request=None) @@ -14099,7 +14099,7 @@ async def test_query_assets_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, )) await client.query_assets(request=None) @@ -14127,10 +14127,10 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) await client.create_saved_query(request=None) @@ -14157,10 +14157,10 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) await client.get_saved_query(request=None) @@ -14187,7 +14187,7 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_saved_queries(request=None) @@ -14214,10 +14214,10 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", )) await client.update_saved_query(request=None) @@ -14295,7 +14295,7 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.analyze_org_policies(request=None) @@ -14322,7 +14322,7 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.analyze_org_policy_governed_containers(request=None) @@ -14349,7 +14349,7 @@ async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.analyze_org_policy_governed_assets(request=None) @@ -14509,7 +14509,7 @@ def test_list_assets_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -14526,7 +14526,7 @@ def test_list_assets_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14719,11 +14719,11 @@ def test_create_feed_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14740,11 +14740,11 @@ def test_create_feed_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14833,11 +14833,11 @@ def test_get_feed_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -14854,11 +14854,11 @@ def test_get_feed_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -15051,11 +15051,11 @@ def test_update_feed_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], + name="name_value", + asset_names=["asset_names_value"], + asset_types=["asset_types_value"], content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], + relationship_types=["relationship_types_value"], ) # Wrap the value into a proper Response obj @@ -15072,11 +15072,11 @@ def test_update_feed_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] + assert response.name == "name_value" + assert response.asset_names == ["asset_names_value"] + assert response.asset_types == ["asset_types_value"] assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] + assert response.relationship_types == ["relationship_types_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -15255,7 +15255,7 @@ def test_search_all_resources_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15272,7 +15272,7 @@ def test_search_all_resources_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -15361,7 +15361,7 @@ def test_search_all_iam_policies_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -15378,7 +15378,7 @@ def test_search_all_iam_policies_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -15778,7 +15778,7 @@ def test_query_assets_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', + job_reference="job_reference_value", done=True, ) @@ -15796,7 +15796,7 @@ def test_query_assets_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' + assert response.job_reference == "job_reference_value" assert response.done is True @@ -15950,10 +15950,10 @@ def get_message_fields(field): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -15970,10 +15970,10 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16062,10 +16062,10 @@ def test_get_saved_query_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16082,10 +16082,10 @@ def test_get_saved_query_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16174,7 +16174,7 @@ def test_list_saved_queries_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16191,7 +16191,7 @@ def test_list_saved_queries_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16344,10 +16344,10 @@ def get_message_fields(field): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', + name="name_value", + description="description_value", + creator="creator_value", + last_updater="last_updater_value", ) # Wrap the value into a proper Response obj @@ -16364,10 +16364,10 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.creator == "creator_value" + assert response.last_updater == "last_updater_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16650,7 +16650,7 @@ def test_analyze_org_policies_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16667,7 +16667,7 @@ def test_analyze_org_policies_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16756,7 +16756,7 @@ def test_analyze_org_policy_governed_containers_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16773,7 +16773,7 @@ def test_analyze_org_policy_governed_containers_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -16862,7 +16862,7 @@ def test_analyze_org_policy_governed_assets_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16879,7 +16879,7 @@ def test_analyze_org_policy_governed_assets_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 9c0382dd33..44d06f22a3 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -30,14 +30,15 @@ from google.iam.credentials_v1.types.common import SignJwtRequest from google.iam.credentials_v1.types.common import SignJwtResponse -__all__ = ('IAMCredentialsClient', - 'IAMCredentialsAsyncClient', - 'GenerateAccessTokenRequest', - 'GenerateAccessTokenResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', +__all__ = ( + "IAMCredentialsClient", + "IAMCredentialsAsyncClient", + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 2890169a65..23cfbbfcdb 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -40,10 +40,10 @@ from .types.common import SignJwtRequest from .types.common import SignJwtResponse -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.iam.credentials_v1") # type: ignore - api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.iam.credentials_v1") # type: ignore + api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: @@ -53,20 +53,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.iam.credentials_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. 
Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -104,35 +108,39 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'IAMCredentialsAsyncClient', -'GenerateAccessTokenRequest', -'GenerateAccessTokenResponse', -'GenerateIdTokenRequest', -'GenerateIdTokenResponse', -'IAMCredentialsClient', -'SignBlobRequest', -'SignBlobResponse', -'SignJwtRequest', -'SignJwtResponse', + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", + "IAMCredentialsAsyncClient", + "IAMCredentialsClient", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index 9882907065..e176b7be00 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -17,6 +17,6 @@ from .async_client import IAMCredentialsAsyncClient __all__ = ( - 'IAMCredentialsClient', - 'IAMCredentialsAsyncClient', + "IAMCredentialsClient", + "IAMCredentialsAsyncClient", ) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index eac708473c..c90bcdd6d4 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.iam.credentials_v1 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -43,12 +54,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class IAMCredentialsAsyncClient: """A service account is a special type of Google account that belongs to your application or a virtual machine (VM), instead @@ -181,12 +194,14 @@ def universe_domain(self) -> str: get_transport_class = IAMCredentialsClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the iam credentials async client. Args: @@ -244,34 +259,36 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.iam.credentials_v1.IAMCredentialsAsyncClient`.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.iam.credentials.v1.IAMCredentials", "credentialsType": None, - } + }, ) - async def generate_access_token(self, - request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - scope: Optional[MutableSequence[str]] = None, - lifetime: Optional[duration_pb2.Duration] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateAccessTokenResponse: + async def generate_access_token( + self, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. @@ -374,8 +391,7 @@ async def sample_generate_access_token(): flattened_params = [name, delegates, scope, lifetime] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -399,11 +415,13 @@ async def sample_generate_access_token(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -419,17 +437,18 @@ async def sample_generate_access_token(): # Done; return the response. 
return response - async def generate_id_token(self, - request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - audience: Optional[str] = None, - include_email: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateIdTokenResponse: + async def generate_id_token( + self, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. @@ -526,8 +545,7 @@ async def sample_generate_id_token(): flattened_params = [name, delegates, audience, include_email] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -551,11 +569,13 @@ async def sample_generate_id_token(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -571,16 +591,17 @@ async def sample_generate_id_token(): # Done; return the response. return response - async def sign_blob(self, - request: Optional[Union[common.SignBlobRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignBlobResponse: + async def sign_blob( + self, + request: Optional[Union[common.SignBlobRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. 
@@ -666,8 +687,7 @@ async def sample_sign_blob(): flattened_params = [name, delegates, payload] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -689,11 +709,13 @@ async def sample_sign_blob(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -709,16 +731,17 @@ async def sample_sign_blob(): # Done; return the response. return response - async def sign_jwt(self, - request: Optional[Union[common.SignJwtRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignJwtResponse: + async def sign_jwt( + self, + request: Optional[Union[common.SignJwtRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. @@ -807,8 +830,7 @@ async def sample_sign_jwt(): flattened_params = [name, delegates, payload] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -830,11 +852,13 @@ async def sample_sign_jwt(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -856,12 +880,11 @@ async def __aenter__(self) -> "IAMCredentialsAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "IAMCredentialsAsyncClient", -) +__all__ = ("IAMCredentialsAsyncClient",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 4f2406bad6..533190d358 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.iam.credentials_v1 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -64,14 +77,16 @@ class IAMCredentialsClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] _transport_registry["grpc"] = IAMCredentialsGrpcTransport _transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport _transport_registry["rest"] = IAMCredentialsRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[IAMCredentialsTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[IAMCredentialsTransport]: """Returns an appropriate transport class. Args: @@ -117,9 +132,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -128,16 +141,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "iamcredentials.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "iamcredentials.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -151,21 +163,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -200,7 +210,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): IAMCredentialsClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -217,73 +228,116 @@ def transport(self) -> IAMCredentialsTransport: return self._transport @staticmethod - def service_account_path(project: str,service_account: str,) -> str: + def service_account_path( + project: str, + service_account: str, + ) -> str: """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: + def parse_service_account_path( + path: str, + ) -> Dict[str, str]: """Parses a service_account path into its component segments.""" m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, 
) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -315,8 +369,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = IAMCredentialsClient._use_client_cert_effective() @@ -446,7 +502,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -486,12 +542,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the iam credentials client.
Args: @@ -551,12 +609,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = IAMCredentialsClient._read_environment_variables() self._client_cert_source = IAMCredentialsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = IAMCredentialsClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -576,22 +634,22 @@ def __init__(self, *, if transport_provided: # transport is a IAMCredentialsTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(IAMCredentialsTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - IAMCredentialsClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or IAMCredentialsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -621,28 +679,31 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.iam.credentials_v1.IAMCredentialsClient`.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.iam.credentials.v1.IAMCredentials", "credentialsType": None, - } + }, ) - def generate_access_token(self, - request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - scope: Optional[MutableSequence[str]] = None, - lifetime: Optional[duration_pb2.Duration] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateAccessTokenResponse: + def generate_access_token( + self, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, + *, + name: 
Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. @@ -745,8 +806,7 @@ def sample_generate_access_token(): flattened_params = [name, delegates, scope, lifetime] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -769,11 +829,13 @@ def sample_generate_access_token(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -789,17 +851,18 @@ def sample_generate_access_token(): # Done; return the response. return response - def generate_id_token(self, - request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - audience: Optional[str] = None, - include_email: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.GenerateIdTokenResponse: + def generate_id_token( + self, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. @@ -896,8 +959,7 @@ def sample_generate_id_token(): flattened_params = [name, delegates, audience, include_email] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -920,11 +982,13 @@ def sample_generate_id_token(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -940,16 +1004,17 @@ def sample_generate_id_token(): # Done; return the response. 
return response - def sign_blob(self, - request: Optional[Union[common.SignBlobRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[bytes] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignBlobResponse: + def sign_blob( + self, + request: Optional[Union[common.SignBlobRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. @@ -1035,8 +1100,7 @@ def sample_sign_blob(): flattened_params = [name, delegates, payload] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1057,11 +1121,13 @@ def sample_sign_blob(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1077,16 +1143,17 @@ def sample_sign_blob(): # Done; return the response. return response - def sign_jwt(self, - request: Optional[Union[common.SignJwtRequest, dict]] = None, - *, - name: Optional[str] = None, - delegates: Optional[MutableSequence[str]] = None, - payload: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> common.SignJwtResponse: + def sign_jwt( + self, + request: Optional[Union[common.SignJwtRequest, dict]] = None, + *, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. @@ -1175,8 +1242,7 @@ def sample_sign_jwt(): flattened_params = [name, delegates, payload] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1197,11 +1263,13 @@ def sample_sign_jwt(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1231,16 +1299,9 @@ def __exit__(self, type, value, traceback): self.transport.close() - - - - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "IAMCredentialsClient", -) +__all__ = ("IAMCredentialsClient",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index 0214f01a40..e71a39ba08 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -25,14 +25,14 @@ # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] -_transport_registry['grpc'] = IAMCredentialsGrpcTransport -_transport_registry['grpc_asyncio'] = IAMCredentialsGrpcAsyncIOTransport -_transport_registry['rest'] = IAMCredentialsRestTransport +_transport_registry["grpc"] = IAMCredentialsGrpcTransport +_transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport +_transport_registry["rest"] = IAMCredentialsRestTransport __all__ = ( - 'IAMCredentialsTransport', - 'IAMCredentialsGrpcTransport', - 'IAMCredentialsGrpcAsyncIOTransport', - 'IAMCredentialsRestTransport', - 'IAMCredentialsRestInterceptor', + "IAMCredentialsTransport", + "IAMCredentialsGrpcTransport", + "IAMCredentialsGrpcAsyncIOTransport", + "IAMCredentialsRestTransport", + "IAMCredentialsRestInterceptor", ) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index c9c8fd9472..bf28b07735 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -24,7 +24,7 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.iam.credentials_v1.types import common @@ -38,29 +38,32 @@ class IAMCredentialsTransport(abc.ABC): """Abstract transport class for IAMCredentials.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', + "https://www.googleapis.com/auth/cloud-platform", ) + # fmt: on - DEFAULT_HOST: str = 'iamcredentials.googleapis.com' + DEFAULT_HOST: str = "iamcredentials.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - 
always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). + The hostname to connect to (default: "iamcredentials.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -96,10 +99,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -107,15 +110,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -185,58 +192,76 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() + # fmt: off @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - Union[ - common.GenerateAccessTokenResponse, - Awaitable[common.GenerateAccessTokenResponse] - ]]: + def generate_access_token( + self, + ) -> Callable[ + [common.GenerateAccessTokenRequest], + Union[ + common.GenerateAccessTokenResponse, + Awaitable[common.GenerateAccessTokenResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - Union[ - common.GenerateIdTokenResponse, - Awaitable[common.GenerateIdTokenResponse] - ]]: + def generate_id_token( + self, + ) -> Callable[ + [common.GenerateIdTokenRequest], + Union[ + common.GenerateIdTokenResponse, + Awaitable[common.GenerateIdTokenResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - Union[ - common.SignBlobResponse, - Awaitable[common.SignBlobResponse] - ]]: + def sign_blob( + self, + ) -> Callable[ + [common.SignBlobRequest], + Union[ + common.SignBlobResponse, + Awaitable[common.SignBlobResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - Union[ - common.SignJwtResponse, - Awaitable[common.SignJwtResponse] - ]]: + def sign_jwt( + self, + ) -> Callable[ + [common.SignJwtRequest], + Union[ + common.SignJwtResponse, + Awaitable[common.SignJwtResponse] + ], + ]: raise NotImplementedError() + # fmt: on @property def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'IAMCredentialsTransport', -) +__all__ = ("IAMCredentialsTransport",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 5baaeb8e5e..e06b8f467f 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -21,7 +21,7 @@ from google.api_core import grpc_helpers from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -35,6 +35,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -54,10 +55,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -65,7 +68,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -91,7 +94,7 @@ def 
intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": client_call_details.method, "response": grpc_response, @@ -122,28 +125,31 @@ class IAMCredentialsGrpcTransport(IAMCredentialsTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). + The hostname to connect to (default: "iamcredentials.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -217,7 +223,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -226,7 +233,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -261,19 +269,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -309,19 +319,18 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - common.GenerateAccessTokenResponse]: + def generate_access_token( + self, + ) -> Callable[[common.GenerateAccessTokenRequest], common.GenerateAccessTokenResponse]: r"""Return a callable for the generate access token method over gRPC. Generates an OAuth 2.0 access token for a service @@ -337,18 +346,18 @@ def generate_access_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, ) - return self._stubs['generate_access_token'] + return self._stubs["generate_access_token"] @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - common.GenerateIdTokenResponse]: + def generate_id_token( + self, + ) -> Callable[[common.GenerateIdTokenRequest], common.GenerateIdTokenResponse]: r"""Return a callable for the generate id token method over gRPC. Generates an OpenID Connect ID token for a service @@ -364,18 +373,18 @@ def generate_id_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + if "generate_id_token" not in self._stubs: + self._stubs["generate_id_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, ) - return self._stubs['generate_id_token'] + return self._stubs["generate_id_token"] @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - common.SignBlobResponse]: + def sign_blob( + self, + ) -> Callable[[common.SignBlobRequest], common.SignBlobResponse]: r"""Return a callable for the sign blob method over gRPC. Signs a blob using a service account's system-managed @@ -391,18 +400,18 @@ def sign_blob(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignBlob', + if "sign_blob" not in self._stubs: + self._stubs["sign_blob"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignBlob", request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, ) - return self._stubs['sign_blob'] + return self._stubs["sign_blob"] @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - common.SignJwtResponse]: + def sign_jwt( + self, + ) -> Callable[[common.SignJwtRequest], common.SignJwtResponse]: r"""Return a callable for the sign jwt method over gRPC. Signs a JWT using a service account's system-managed @@ -418,13 +427,13 @@ def sign_jwt(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignJwt', + if "sign_jwt" not in self._stubs: + self._stubs["sign_jwt"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignJwt", request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, ) - return self._stubs['sign_jwt'] + return self._stubs["sign_jwt"] def close(self): self._logged_channel.close() @@ -434,6 +443,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'IAMCredentialsGrpcTransport', -) +__all__ = ("IAMCredentialsGrpcTransport",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index a793abd09c..963ec375cb 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -24,13 +24,13 @@ from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.iam.credentials_v1.types import common @@ -39,6 +39,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -58,10 +59,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -69,7 +72,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -95,7 +98,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -131,13 +134,15 @@ class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> 
aio.Channel: + def create_channel( + cls, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -168,29 +173,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). + The hostname to connect to (default: "iamcredentials.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -264,7 +271,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -273,7 +281,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -325,9 +334,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - Awaitable[common.GenerateAccessTokenResponse]]: + def generate_access_token( + self, + ) -> Callable[[common.GenerateAccessTokenRequest], Awaitable[common.GenerateAccessTokenResponse]]: r"""Return a callable for the generate access token method over gRPC. Generates an OAuth 2.0 access token for a service @@ -343,18 +352,18 @@ def generate_access_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + if "generate_access_token" not in self._stubs: + self._stubs["generate_access_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, ) - return self._stubs['generate_access_token'] + return self._stubs["generate_access_token"] @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - Awaitable[common.GenerateIdTokenResponse]]: + def generate_id_token( + self, + ) -> Callable[[common.GenerateIdTokenRequest], Awaitable[common.GenerateIdTokenResponse]]: r"""Return a callable for the generate id token method over gRPC. Generates an OpenID Connect ID token for a service @@ -370,18 +379,18 @@ def generate_id_token(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + if "generate_id_token" not in self._stubs: + self._stubs["generate_id_token"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, ) - return self._stubs['generate_id_token'] + return self._stubs["generate_id_token"] @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - Awaitable[common.SignBlobResponse]]: + def sign_blob( + self, + ) -> Callable[[common.SignBlobRequest], Awaitable[common.SignBlobResponse]]: r"""Return a callable for the sign blob method over gRPC. Signs a blob using a service account's system-managed @@ -397,18 +406,18 @@ def sign_blob(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignBlob', + if "sign_blob" not in self._stubs: + self._stubs["sign_blob"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignBlob", request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, ) - return self._stubs['sign_blob'] + return self._stubs["sign_blob"] @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - Awaitable[common.SignJwtResponse]]: + def sign_jwt( + self, + ) -> Callable[[common.SignJwtRequest], Awaitable[common.SignJwtResponse]]: r"""Return a callable for the sign jwt method over gRPC. Signs a JWT using a service account's system-managed @@ -424,16 +433,16 @@ def sign_jwt(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self._logged_channel.unary_unary( - '/google.iam.credentials.v1.IAMCredentials/SignJwt', + if "sign_jwt" not in self._stubs: + self._stubs["sign_jwt"] = self._logged_channel.unary_unary( + "/google.iam.credentials.v1.IAMCredentials/SignJwt", request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, ) - return self._stubs['sign_jwt'] + return self._stubs["sign_jwt"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.generate_access_token: self._wrap_method( self.generate_access_token, @@ -510,6 +519,4 @@ def kind(self) -> str: return "grpc_asyncio" -__all__ = ( - 'IAMCredentialsGrpcAsyncIOTransport', -) +__all__ = ("IAMCredentialsGrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index c0ad2e73ea..c691ca8a89 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -46,6 +46,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -114,7 +115,12 @@ def post_sign_jwt(self, response): """ - def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_generate_access_token( + self, + request: common.GenerateAccessTokenRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_access_token Override in a subclass to manipulate the request or metadata @@ -122,7 +128,10 @@ def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, """ return request, metadata - def 
post_generate_access_token(self, response: common.GenerateAccessTokenResponse) -> common.GenerateAccessTokenResponse: + def post_generate_access_token( + self, + response: common.GenerateAccessTokenResponse, + ) -> common.GenerateAccessTokenResponse: """Post-rpc interceptor for generate_access_token DEPRECATED. Please use the `post_generate_access_token_with_metadata` @@ -135,7 +144,11 @@ def post_generate_access_token(self, response: common.GenerateAccessTokenRespons """ return response - def post_generate_access_token_with_metadata(self, response: common.GenerateAccessTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_generate_access_token_with_metadata( + self, + response: common.GenerateAccessTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for generate_access_token Override in a subclass to read or manipulate the response or metadata after it @@ -150,7 +163,11 @@ def post_generate_access_token_with_metadata(self, response: common.GenerateAcce """ return response, metadata - def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_generate_id_token( + self, + request: common.GenerateIdTokenRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_id_token Override in a subclass to manipulate the request or metadata @@ -158,7 +175,10 @@ def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata """ return request, metadata - def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> common.GenerateIdTokenResponse: + def post_generate_id_token( + self, + response: common.GenerateIdTokenResponse, + ) -> common.GenerateIdTokenResponse: """Post-rpc interceptor for generate_id_token DEPRECATED. 
Please use the `post_generate_id_token_with_metadata` @@ -171,7 +191,11 @@ def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> co """ return response - def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_generate_id_token_with_metadata( + self, + response: common.GenerateIdTokenResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for generate_id_token Override in a subclass to read or manipulate the response or metadata after it @@ -186,7 +210,11 @@ def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenR """ return response, metadata - def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_sign_blob( + self, + request: common.SignBlobRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_blob Override in a subclass to manipulate the request or metadata @@ -194,7 +222,10 @@ def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tupl """ return request, metadata - def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobResponse: + def post_sign_blob( + self, + response: common.SignBlobResponse, + ) -> common.SignBlobResponse: """Post-rpc interceptor for sign_blob DEPRECATED. Please use the `post_sign_blob_with_metadata` @@ -207,7 +238,11 @@ def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobRe """ return response - def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_sign_blob_with_metadata( + self, + response: common.SignBlobResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for sign_blob Override in a subclass to read or manipulate the response or metadata after it @@ -222,7 +257,11 @@ def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metada """ return response, metadata - def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_sign_jwt( + self, + request: common.SignJwtRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_jwt Override in a subclass to manipulate the request or metadata @@ -230,7 +269,10 @@ def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[ """ return request, metadata - def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtResponse: + def post_sign_jwt( + self, + response: common.SignJwtResponse, + ) -> common.SignJwtResponse: """Post-rpc interceptor for sign_jwt DEPRECATED. 
Please use the `post_sign_jwt_with_metadata` @@ -243,7 +285,11 @@ def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtRespo """ return response - def post_sign_jwt_with_metadata(self, response: common.SignJwtResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_sign_jwt_with_metadata( + self, + response: common.SignJwtResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for sign_jwt Override in a subclass to read or manipulate the response or metadata after it @@ -287,29 +333,30 @@ class IAMCredentialsRestTransport(_BaseIAMCredentialsRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[IAMCredentialsRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[IAMCredentialsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). + The hostname to connect to (default: "iamcredentials.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -348,10 +395,9 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._interceptor = interceptor or IAMCredentialsRestInterceptor() @@ -369,27 +415,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.GenerateAccessTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.GenerateAccessTokenResponse: + def __call__( + self, + request: common.GenerateAccessTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. Args: @@ -419,21 +467,21 @@ def __call__(self, query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateAccessToken", "httpRequest": http_request, @@ -442,7 +490,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._GenerateAccessToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._GenerateAccessToken._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
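For review context, a minimal usage sketch of the generate_access_token surface reformatted above (hypothetical project and service-account names; assumes the generated google.iam.credentials_v1 package is installed and Application Default Credentials are available):

# Illustrative sketch only -- not part of the patch.
from google.iam.credentials_v1 import IAMCredentialsClient
from google.protobuf import duration_pb2

# "rest" selects the transport whose hunks appear above; gRPC is the default.
client = IAMCredentialsClient(transport="rest")

# "demo-project" and the service-account email are placeholders.
name = IAMCredentialsClient.service_account_path(
    "demo-project", "demo-sa@demo-project.iam.gserviceaccount.com"
)
response = client.generate_access_token(
    name=name,
    scope=["https://www.googleapis.com/auth/cloud-platform"],
    lifetime=duration_pb2.Duration(seconds=3600),
)
print(response.access_token)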
@@ -464,13 +520,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_access_token", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateAccessToken", "metadata": http_response["headers"], @@ -491,27 +547,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.GenerateIdTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.GenerateIdTokenResponse: + def __call__( + self, + request: common.GenerateIdTokenRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.GenerateIdTokenResponse: r"""Call the generate id token method over HTTP. Args: @@ -541,21 +599,21 @@ def __call__(self, query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateIdToken", "httpRequest": http_request, @@ -564,7 +622,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._GenerateIdToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._GenerateIdToken._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
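The reformatted path helpers earlier in this patch pair a .format() builder with a named-group regex parser; a self-contained sketch of that round trip (standalone copies for illustration, not the generated methods themselves):

import re
from typing import Dict

def service_account_path(project: str, service_account: str) -> str:
    # Mirrors the generated builder: joins the segments into a resource name.
    return "projects/{project}/serviceAccounts/{service_account}".format(
        project=project,
        service_account=service_account,
    )

def parse_service_account_path(path: str) -> Dict[str, str]:
    # Mirrors the generated parser: named groups recover each segment.
    m = re.match(r"^projects/(?P<project>.+?)/serviceAccounts/(?P<service_account>.+?)$", path)
    return m.groupdict() if m else {}

path = service_account_path("demo-project", "demo-sa")
assert parse_service_account_path(path) == {"project": "demo-project", "service_account": "demo-sa"}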
@@ -586,13 +652,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_id_token", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateIdToken", "metadata": http_response["headers"], @@ -613,27 +679,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.SignBlobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.SignBlobResponse: + def __call__( + self, + request: common.SignBlobRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. Args: @@ -663,21 +731,21 @@ def __call__(self, query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignBlob", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignBlob", "httpRequest": http_request, @@ -686,7 +754,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._SignBlob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._SignBlob._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
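The reformatted __call__ signatures also make the calling contract easier to see: the bare * keeps retry, timeout, and metadata keyword-only, and black simply gives each parameter its own line with a trailing comma. A reduced sketch of the same contract (names and types simplified for illustration):

from typing import Optional, Sequence, Tuple

def call(
    request: dict,
    *,
    timeout: Optional[float] = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> str:
    # Parameters after the bare * can only be passed by keyword.
    return f"{request['rpc']}: timeout={timeout}, metadata={list(metadata)}"

call({"rpc": "SignBlob"}, timeout=5.0)       # OK
# call({"rpc": "SignBlob"}, 5.0)             # TypeError: takes 1 positional argument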
@@ -708,13 +784,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_blob", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignBlob", "metadata": http_response["headers"], @@ -735,27 +811,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: common.SignJwtRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> common.SignJwtResponse: + def __call__( + self, + request: common.SignJwtRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. Args: @@ -785,21 +863,21 @@ def __call__(self, query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignJwt", "httpRequest": http_request, @@ -808,7 +886,15 @@ def __call__(self, ) # Send the request - response = IAMCredentialsRestTransport._SignJwt._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = IAMCredentialsRestTransport._SignJwt._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
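The property hunks just below wrap each callable property in # fmt: off / # fmt: on. Those directives make black skip the enclosed region entirely, which is how hand-chosen layouts survive the formatting session enabled in the noxfile. A minimal illustration of the guard, assuming black's standard handling of the directives (the dict contents are hypothetical):

# fmt: off
RETRYABLE_CODES = {
    "UNAVAILABLE":        True,   # black would collapse this alignment,
    "DEADLINE_EXCEEDED":  True,   # but the off/on pair preserves it.
    "INVALID_ARGUMENT":   False,
}
# fmt: on

def is_retryable(code: str) -> bool:
    return RETRYABLE_CODES.get(code, False)  # outside the guard, black reflows freely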
@@ -830,13 +916,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_jwt", - extra = { + extra={ "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignJwt", "metadata": http_response["headers"], @@ -845,37 +931,45 @@ def __call__(self, ) return resp + # fmt: off @property - def generate_access_token(self) -> Callable[ - [common.GenerateAccessTokenRequest], - common.GenerateAccessTokenResponse]: + def generate_access_token( + self + ) -> Callable[[common.GenerateAccessTokenRequest], common.GenerateAccessTokenResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def generate_id_token(self) -> Callable[ - [common.GenerateIdTokenRequest], - common.GenerateIdTokenResponse]: + def generate_id_token( + self + ) -> Callable[[common.GenerateIdTokenRequest], common.GenerateIdTokenResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def sign_blob(self) -> Callable[ - [common.SignBlobRequest], - common.SignBlobResponse]: + def sign_blob( + self + ) -> Callable[[common.SignBlobRequest], common.SignBlobResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore + return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def sign_jwt(self) -> Callable[ - [common.SignJwtRequest], - common.SignJwtResponse]: + def sign_jwt( + self + ) -> Callable[[common.SignJwtRequest], common.SignJwtResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore + return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore + # fmt: on @property def kind(self) -> str: @@ -885,6 +979,4 @@ def close(self): self._session.close() -__all__=( - 'IAMCredentialsRestTransport', -) +__all__ = ("IAMCredentialsRestTransport",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index a03907410f..0c17413bad 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -40,18 +40,20 @@ class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'iamcredentials.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "iamcredentials.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'iamcredentials.googleapis.com'). + The hostname to connect to (default: "iamcredentials.googleapis.com"). credentials (Optional[Any]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -82,15 +84,17 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseGenerateAccessToken: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -98,11 +102,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken", + "body": "*", + }, ] return http_options @@ -117,16 +122,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_unset_required_fields(query_params)) return query_params @@ -135,8 +143,10 @@ class _BaseGenerateIdToken: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -144,11 +154,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateIdToken', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:generateIdToken", + "body": "*", + }, ] return http_options @@ -163,16 +174,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_unset_required_fields(query_params)) return query_params @@ -181,8 +195,10 @@ class _BaseSignBlob: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -190,11 +206,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signBlob', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:signBlob", + "body": "*", + }, ] return http_options @@ -209,16 +226,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseIAMCredentialsRestTransport._BaseSignBlob._get_unset_required_fields(query_params)) return query_params @@ -227,8 +247,10 @@ class _BaseSignJwt: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -236,11 +258,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signJwt', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/serviceAccounts/*}:signJwt", + "body": "*", + }, ] return http_options @@ -255,21 +278,22 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseIAMCredentialsRestTransport._BaseSignJwt._get_unset_required_fields(query_params)) return query_params -__all__=( - '_BaseIAMCredentialsRestTransport', -) +__all__ = ("_BaseIAMCredentialsRestTransport",) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index f257b88992..e3846fefa8 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -25,12 +25,12 @@ ) __all__ = ( - 'GenerateAccessTokenRequest', - 'GenerateAccessTokenResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "GenerateIdTokenRequest", + 
"GenerateIdTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", ) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index f267a4b7a5..67c4a75bb6 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -23,19 +23,21 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.iam.credentials.v1', + package="google.iam.credentials.v1", manifest={ - 'GenerateAccessTokenRequest', - 'GenerateAccessTokenResponse', - 'SignBlobRequest', - 'SignBlobResponse', - 'SignJwtRequest', - 'SignJwtResponse', - 'GenerateIdTokenRequest', - 'GenerateIdTokenResponse', + "GenerateAccessTokenRequest", + "GenerateAccessTokenResponse", + "SignBlobRequest", + "SignBlobResponse", + "SignJwtRequest", + "SignJwtResponse", + "GenerateIdTokenRequest", + "GenerateIdTokenResponse", }, ) +# fmt: on class GenerateAccessTokenRequest(proto.Message): diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index 17d048bd5d..ebcf53e0e0 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -16,11 +16,13 @@ import proto # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.iam.credentials.v1', + package="google.iam.credentials.v1", manifest={ }, ) +# fmt: on __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/integration/goldens/credentials/noxfile.py b/tests/integration/goldens/credentials/noxfile.py index 7be79219b6..4f968ac064 100755 --- a/tests/integration/goldens/credentials/noxfile.py +++ b/tests/integration/goldens/credentials/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index d9f2f0a4be..9e550cd17e 100755 --- a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -975,7 +975,7 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", ) response = client.generate_access_token(request) @@ -987,7 +987,7 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" def test_generate_access_token_non_empty_request_with_auto_populated_field(): @@ -1002,7 +1002,7 @@ def test_generate_access_token_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.GenerateAccessTokenRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1014,7 +1014,7 @@ def test_generate_access_token_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest( - name='name_value', + name="name_value", ) def test_generate_access_token_use_cached_wrapped_rpc(): @@ -1100,7 +1100,7 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", )) response = await client.generate_access_token(request) @@ -1112,7 +1112,7 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" @pytest.mark.asyncio @@ -1128,7 +1128,7 @@ def test_generate_access_token_field_headers(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1160,7 +1160,7 @@ async def test_generate_access_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1196,9 +1196,9 @@ def test_generate_access_token_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.generate_access_token( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1207,13 +1207,13 @@ def test_generate_access_token_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].scope - mock_val = ['scope_value'] + mock_val = ["scope_value"] assert arg == mock_val assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) @@ -1228,9 +1228,9 @@ def test_generate_access_token_flattened_error(): with pytest.raises(ValueError): client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1251,9 +1251,9 @@ async def test_generate_access_token_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.generate_access_token( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1262,13 +1262,13 @@ async def test_generate_access_token_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].scope - mock_val = ['scope_value'] + mock_val = ["scope_value"] assert arg == mock_val assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) @@ -1283,9 +1283,9 @@ async def test_generate_access_token_flattened_error_async(): with pytest.raises(ValueError): await client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -1310,7 +1310,7 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = common.GenerateIdTokenResponse( - token='token_value', + token="token_value", ) response = client.generate_id_token(request) @@ -1322,7 +1322,7 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" def test_generate_id_token_non_empty_request_with_auto_populated_field(): @@ -1337,8 +1337,8 @@ def test_generate_id_token_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = common.GenerateIdTokenRequest( - name='name_value', - audience='audience_value', + name="name_value", + audience="audience_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1350,8 +1350,8 @@ def test_generate_id_token_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest( - name='name_value', - audience='audience_value', + name="name_value", + audience="audience_value", ) def test_generate_id_token_use_cached_wrapped_rpc(): @@ -1437,7 +1437,7 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', + token="token_value", )) response = await client.generate_id_token(request) @@ -1449,7 +1449,7 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" @pytest.mark.asyncio @@ -1465,7 +1465,7 @@ def test_generate_id_token_field_headers(): # a field header. Set these to a non-empty value. request = common.GenerateIdTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1497,7 +1497,7 @@ async def test_generate_id_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateIdTokenRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1533,9 +1533,9 @@ def test_generate_id_token_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.generate_id_token( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1544,13 +1544,13 @@ def test_generate_id_token_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].audience - mock_val = 'audience_value' + mock_val = "audience_value" assert arg == mock_val arg = args[0].include_email mock_val = True @@ -1567,9 +1567,9 @@ def test_generate_id_token_flattened_error(): with pytest.raises(ValueError): client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1590,9 +1590,9 @@ async def test_generate_id_token_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.generate_id_token( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1601,13 +1601,13 @@ async def test_generate_id_token_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].audience - mock_val = 'audience_value' + mock_val = "audience_value" assert arg == mock_val arg = args[0].include_email mock_val = True @@ -1624,9 +1624,9 @@ async def test_generate_id_token_flattened_error_async(): with pytest.raises(ValueError): await client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -1651,7 +1651,7 @@ def test_sign_blob(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = common.SignBlobResponse( - key_id='key_id_value', + key_id="key_id_value", signed_blob=b'signed_blob_blob', ) response = client.sign_blob(request) @@ -1664,7 +1664,7 @@ def test_sign_blob(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' + assert response.key_id == "key_id_value" assert response.signed_blob == b'signed_blob_blob' @@ -1680,7 +1680,7 @@ def test_sign_blob_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.SignBlobRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1692,7 +1692,7 @@ def test_sign_blob_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest( - name='name_value', + name="name_value", ) def test_sign_blob_use_cached_wrapped_rpc(): @@ -1778,7 +1778,7 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', + key_id="key_id_value", signed_blob=b'signed_blob_blob', )) response = await client.sign_blob(request) @@ -1791,7 +1791,7 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' + assert response.key_id == "key_id_value" assert response.signed_blob == b'signed_blob_blob' @@ -1808,7 +1808,7 @@ def test_sign_blob_field_headers(): # a field header. Set these to a non-empty value. request = common.SignBlobRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1840,7 +1840,7 @@ async def test_sign_blob_field_headers_async(): # a field header. Set these to a non-empty value. 
request = common.SignBlobRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1876,8 +1876,8 @@ def test_sign_blob_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.sign_blob( - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) @@ -1886,10 +1886,10 @@ def test_sign_blob_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload mock_val = b'payload_blob' @@ -1906,8 +1906,8 @@ def test_sign_blob_flattened_error(): with pytest.raises(ValueError): client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) @@ -1928,8 +1928,8 @@ async def test_sign_blob_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.sign_blob( - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) @@ -1938,10 +1938,10 @@ async def test_sign_blob_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload mock_val = b'payload_blob' @@ -1958,8 +1958,8 @@ async def test_sign_blob_flattened_error_async(): with pytest.raises(ValueError): await client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) @@ -1984,8 +1984,8 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", ) response = client.sign_jwt(request) @@ -1997,8 +1997,8 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" def test_sign_jwt_non_empty_request_with_auto_populated_field(): @@ -2013,8 +2013,8 @@ def test_sign_jwt_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = common.SignJwtRequest( - name='name_value', - payload='payload_value', + name="name_value", + payload="payload_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
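The unit-test hunks above all rely on the same mocking idiom: patch __call__ on the stub's type, drive the client, then unpack call.mock_calls[0] into (name, args, kwargs) to inspect the request that reached the transport. A stripped-down sketch of the technique against a hypothetical client (FakeStub and FakeClient are stand-ins, not generated classes):

from unittest import mock

class FakeStub:
    def __call__(self, request):
        return "real response"

class FakeClient:
    def __init__(self):
        self.sign_jwt_stub = FakeStub()

    def sign_jwt(self, name):
        return self.sign_jwt_stub({"name": name})

client = FakeClient()
with mock.patch.object(type(client.sign_jwt_stub), "__call__") as call:
    call.return_value = "mocked response"
    client.sign_jwt(name="name_value")

    call.assert_called()
    # mock_calls entries unpack as (name, args, kwargs).
    _, args, _ = call.mock_calls[0]
    assert args[0] == {"name": "name_value"}

Because the mock replaces __call__ on the class rather than the instance, the patch intercepts the call protocol itself, which is why the recorded args contain only the request.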
@@ -2026,8 +2026,8 @@ def test_sign_jwt_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest( - name='name_value', - payload='payload_value', + name="name_value", + payload="payload_value", ) def test_sign_jwt_use_cached_wrapped_rpc(): @@ -2113,8 +2113,8 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", )) response = await client.sign_jwt(request) @@ -2126,8 +2126,8 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" @pytest.mark.asyncio @@ -2143,7 +2143,7 @@ def test_sign_jwt_field_headers(): # a field header. Set these to a non-empty value. request = common.SignJwtRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2175,7 +2175,7 @@ async def test_sign_jwt_field_headers_async(): # a field header. Set these to a non-empty value. request = common.SignJwtRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2211,9 +2211,9 @@ def test_sign_jwt_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.sign_jwt( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) # Establish that the underlying call was made with the expected @@ -2221,13 +2221,13 @@ def test_sign_jwt_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = 'payload_value' + mock_val = "payload_value" assert arg == mock_val @@ -2241,9 +2241,9 @@ def test_sign_jwt_flattened_error(): with pytest.raises(ValueError): client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) @pytest.mark.asyncio @@ -2263,9 +2263,9 @@ async def test_sign_jwt_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.sign_jwt( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) # Establish that the underlying call was made with the expected @@ -2273,13 +2273,13 @@ async def test_sign_jwt_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].delegates - mock_val = ['delegates_value'] + mock_val = ["delegates_value"] assert arg == mock_val arg = args[0].payload - mock_val = 'payload_value' + mock_val = "payload_value" assert arg == mock_val @pytest.mark.asyncio @@ -2293,9 +2293,9 @@ async def test_sign_jwt_flattened_error_async(): with pytest.raises(ValueError): await client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) @@ -2353,17 +2353,17 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["scope"] = 'scope_value' + jsonified_request["name"] = "name_value" + jsonified_request["scope"] = "scope_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "scope" in jsonified_request - assert jsonified_request["scope"] == 'scope_value' + assert jsonified_request["scope"] == "scope_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2432,9 +2432,9 @@ def test_generate_access_token_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) mock_args.update(sample_request) @@ -2469,9 +2469,9 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.generate_access_token( common.GenerateAccessTokenRequest(), - name='name_value', - delegates=['delegates_value'], - scope=['scope_value'], + name="name_value", + delegates=["delegates_value"], + scope=["scope_value"], lifetime=duration_pb2.Duration(seconds=751), ) @@ -2530,17 +2530,17 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["audience"] = 'audience_value' + jsonified_request["name"] = "name_value" + jsonified_request["audience"] = "audience_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "audience" in jsonified_request - assert 
jsonified_request["audience"] == 'audience_value' + assert jsonified_request["audience"] == "audience_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2609,9 +2609,9 @@ def test_generate_id_token_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) mock_args.update(sample_request) @@ -2646,9 +2646,9 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.generate_id_token( common.GenerateIdTokenRequest(), - name='name_value', - delegates=['delegates_value'], - audience='audience_value', + name="name_value", + delegates=["delegates_value"], + audience="audience_value", include_email=True, ) @@ -2707,7 +2707,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" jsonified_request["payload"] = b'payload_blob' unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) @@ -2715,7 +2715,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "payload" in jsonified_request assert jsonified_request["payload"] == b'payload_blob' @@ -2786,8 +2786,8 @@ def test_sign_blob_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) mock_args.update(sample_request) @@ -2822,8 +2822,8 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.sign_blob( common.SignBlobRequest(), - name='name_value', - delegates=['delegates_value'], + name="name_value", + delegates=["delegates_value"], payload=b'payload_blob', ) @@ -2882,17 +2882,17 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' - jsonified_request["payload"] = 'payload_value' + jsonified_request["name"] = "name_value" + jsonified_request["payload"] = "payload_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "payload" in jsonified_request - assert jsonified_request["payload"] == 'payload_value' + assert jsonified_request["payload"] == "payload_value" client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2961,9 +2961,9 @@ def test_sign_jwt_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + 
payload="payload_value", ) mock_args.update(sample_request) @@ -2997,9 +2997,9 @@ def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.sign_jwt( common.SignJwtRequest(), - name='name_value', - delegates=['delegates_value'], - payload='payload_value', + name="name_value", + delegates=["delegates_value"], + payload="payload_value", ) @@ -3227,7 +3227,7 @@ async def test_generate_access_token_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", )) await client.generate_access_token(request=None) @@ -3254,7 +3254,7 @@ async def test_generate_id_token_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', + token="token_value", )) await client.generate_id_token(request=None) @@ -3281,7 +3281,7 @@ async def test_sign_blob_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', + key_id="key_id_value", signed_blob=b'signed_blob_blob', )) await client.sign_blob(request=None) @@ -3309,8 +3309,8 @@ async def test_sign_jwt_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", )) await client.sign_jwt(request=None) @@ -3369,7 +3369,7 @@ def test_generate_access_token_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', + access_token="access_token_value", ) # Wrap the value into a proper Response obj @@ -3386,7 +3386,7 @@ def test_generate_access_token_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' + assert response.access_token == "access_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3475,7 +3475,7 @@ def test_generate_id_token_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = common.GenerateIdTokenResponse( - token='token_value', + token="token_value", ) # Wrap the value into a proper Response obj @@ -3492,7 +3492,7 @@ def test_generate_id_token_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' + assert response.token == "token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -3581,7 +3581,7 @@ def test_sign_blob_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
return_value = common.SignBlobResponse( - key_id='key_id_value', + key_id="key_id_value", signed_blob=b'signed_blob_blob', ) @@ -3599,7 +3599,7 @@ def test_sign_blob_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' + assert response.key_id == "key_id_value" assert response.signed_blob == b'signed_blob_blob' @@ -3689,8 +3689,8 @@ def test_sign_jwt_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', + key_id="key_id_value", + signed_jwt="signed_jwt_value", ) # Wrap the value into a proper Response obj @@ -3707,8 +3707,8 @@ def test_sign_jwt_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' + assert response.key_id == "key_id_value" + assert response.signed_jwt == "signed_jwt_value" @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 20d981ec3e..aaae32093e 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -69,10 +69,10 @@ from .types.trigger import Transport from .types.trigger import Trigger -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: @@ -82,20 +82,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.cloud.eventarc_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. 
Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -133,64 +137,68 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + f"updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'EventarcAsyncClient', -'Channel', -'ChannelConnection', -'CloudRun', -'CreateChannelConnectionRequest', -'CreateChannelRequest', -'CreateTriggerRequest', -'DeleteChannelConnectionRequest', -'DeleteChannelRequest', -'DeleteTriggerRequest', -'Destination', -'EventFilter', -'EventType', -'EventarcClient', -'FilteringAttribute', -'GKE', -'GetChannelConnectionRequest', -'GetChannelRequest', -'GetGoogleChannelConfigRequest', -'GetProviderRequest', -'GetTriggerRequest', -'GoogleChannelConfig', -'ListChannelConnectionsRequest', -'ListChannelConnectionsResponse', -'ListChannelsRequest', -'ListChannelsResponse', -'ListProvidersRequest', -'ListProvidersResponse', -'ListTriggersRequest', -'ListTriggersResponse', -'OperationMetadata', -'Provider', -'Pubsub', -'StateCondition', -'Transport', -'Trigger', -'UpdateChannelRequest', -'UpdateGoogleChannelConfigRequest', -'UpdateTriggerRequest', + "Channel", + "ChannelConnection", + "CloudRun", + "CreateChannelConnectionRequest", + "CreateChannelRequest", + "CreateTriggerRequest", + "DeleteChannelConnectionRequest", + "DeleteChannelRequest", + "DeleteTriggerRequest", + "Destination", + "EventarcAsyncClient", + "EventarcClient", + "EventFilter", + "EventType", + "FilteringAttribute", + "GetChannelConnectionRequest", + "GetChannelRequest", + "GetGoogleChannelConfigRequest", + "GetProviderRequest", + "GetTriggerRequest", + "GKE", + "GoogleChannelConfig", + "ListChannelConnectionsRequest", + "ListChannelConnectionsResponse", + "ListChannelsRequest", + "ListChannelsResponse", + "ListProvidersRequest", + "ListProvidersResponse", + "ListTriggersRequest", + "ListTriggersResponse", + "OperationMetadata", + "Provider", + "Pubsub", + "StateCondition", + "Transport", + "Trigger", + "UpdateChannelRequest", + "UpdateGoogleChannelConfigRequest", + "UpdateTriggerRequest", ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py index c604e58a85..3767771ed5 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -17,6 +17,6 @@ from .async_client import EventarcAsyncClient __all__ = ( - 'EventarcClient', - 'EventarcAsyncClient', + "EventarcClient", + "EventarcAsyncClient", ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 09b53be781..16d4c9a981 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.eventarc_v1 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from 
google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -47,10 +58,10 @@ from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -59,12 +70,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class EventarcAsyncClient: """Eventarc allows users to subscribe to various events that are provided by Google Cloud services and forward them to supported @@ -208,12 +221,14 @@ def universe_domain(self) -> str: get_transport_class = EventarcClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the eventarc async client. 
Args: @@ -271,31 +286,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.eventarc_v1.EventarcAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.eventarc.v1.Eventarc", "credentialsType": None, - } + }, ) - async def get_trigger(self, - request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> trigger.Trigger: + async def get_trigger( + self, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> trigger.Trigger: r"""Get a single trigger. .. code-block:: python @@ -355,8 +372,7 @@ async def sample_get_trigger(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -374,11 +390,13 @@ async def sample_get_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -394,14 +412,15 @@ async def sample_get_trigger(): # Done; return the response. return response - async def list_triggers(self, - request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTriggersAsyncPager: + async def list_triggers( + self, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTriggersAsyncPager: r"""List triggers. .. 
code-block:: python @@ -464,8 +483,7 @@ async def sample_list_triggers(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -483,11 +501,13 @@ async def sample_list_triggers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -514,16 +534,17 @@ async def sample_list_triggers(): # Done; return the response. return response - async def create_trigger(self, - request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - trigger: Optional[gce_trigger.Trigger] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_trigger( + self, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and location. @@ -613,8 +634,7 @@ async def sample_create_trigger(): flattened_params = [parent, trigger, trigger_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -636,11 +656,13 @@ async def sample_create_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -664,16 +686,17 @@ async def sample_create_trigger(): # Done; return the response. 
return response - async def update_trigger(self, - request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, - *, - trigger: Optional[gce_trigger.Trigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_trigger( + self, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, + *, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Update a single trigger. .. code-block:: python @@ -755,8 +778,7 @@ async def sample_update_trigger(): flattened_params = [trigger, update_mask, allow_missing] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -778,11 +800,13 @@ async def sample_update_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("trigger.name", request.trigger.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -806,15 +830,16 @@ async def sample_update_trigger(): # Done; return the response. return response - async def delete_trigger(self, - request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_trigger( + self, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single trigger. .. code-block:: python @@ -890,8 +915,7 @@ async def sample_delete_trigger(): flattened_params = [name, allow_missing] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -911,11 +935,13 @@ async def sample_delete_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -939,14 +965,15 @@ async def sample_delete_trigger(): # Done; return the response. return response - async def get_channel(self, - request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel.Channel: + async def get_channel( + self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel.Channel: r"""Get a single Channel. .. code-block:: python @@ -1012,8 +1039,7 @@ async def sample_get_channel(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1031,11 +1057,13 @@ async def sample_get_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1051,14 +1079,15 @@ async def sample_get_channel(): # Done; return the response. return response - async def list_channels(self, - request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelsAsyncPager: + async def list_channels( + self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelsAsyncPager: r"""List channels. .. code-block:: python @@ -1121,8 +1150,7 @@ async def sample_list_channels(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1140,11 +1168,13 @@ async def sample_list_channels(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1171,16 +1201,17 @@ async def sample_list_channels(): # Done; return the response. return response - async def create_channel(self, - request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel: Optional[gce_channel.Channel] = None, - channel_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_channel( + self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new channel in a particular project and location. @@ -1270,8 +1301,7 @@ async def sample_create_channel(): flattened_params = [parent, channel, channel_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1293,11 +1323,13 @@ async def sample_create_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1321,15 +1353,16 @@ async def sample_create_channel(): # Done; return the response. return response - async def update_channel(self, - request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, - *, - channel: Optional[gce_channel.Channel] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_channel( + self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Update a single channel. .. 
code-block:: python @@ -1406,8 +1439,7 @@ async def sample_update_channel(): flattened_params = [channel, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1427,11 +1459,13 @@ async def sample_update_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("channel.name", request.channel.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1455,14 +1489,15 @@ async def sample_update_channel(): # Done; return the response. return response - async def delete_channel(self, - request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_channel( + self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single channel. .. code-block:: python @@ -1533,8 +1568,7 @@ async def sample_delete_channel(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1552,11 +1586,13 @@ async def sample_delete_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1580,14 +1616,15 @@ async def sample_delete_channel(): # Done; return the response. 
return response - async def get_provider(self, - request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> discovery.Provider: + async def get_provider( + self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discovery.Provider: r"""Get a single Provider. .. code-block:: python @@ -1647,8 +1684,7 @@ async def sample_get_provider(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1666,11 +1702,13 @@ async def sample_get_provider(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1686,14 +1724,15 @@ async def sample_get_provider(): # Done; return the response. return response - async def list_providers(self, - request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvidersAsyncPager: + async def list_providers( + self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProvidersAsyncPager: r"""List providers. .. code-block:: python @@ -1756,8 +1795,7 @@ async def sample_list_providers(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1775,11 +1813,13 @@ async def sample_list_providers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1806,14 +1846,15 @@ async def sample_list_providers(): # Done; return the response. 
return response - async def get_channel_connection(self, - request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel_connection.ChannelConnection: + async def get_channel_connection( + self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. .. code-block:: python @@ -1878,8 +1919,7 @@ async def sample_get_channel_connection(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1897,11 +1937,13 @@ async def sample_get_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1917,14 +1959,15 @@ async def sample_get_channel_connection(): # Done; return the response. return response - async def list_channel_connections(self, - request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelConnectionsAsyncPager: + async def list_channel_connections( + self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelConnectionsAsyncPager: r"""List channel connections. .. code-block:: python @@ -1988,8 +2031,7 @@ async def sample_list_channel_connections(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2007,11 +2049,13 @@ async def sample_list_channel_connections(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2038,16 +2082,17 @@ async def sample_list_channel_connections(): # Done; return the response. return response - async def create_channel_connection(self, - request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, - channel_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_channel_connection( + self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Create a new ChannelConnection in a particular project and location. @@ -2137,8 +2182,7 @@ async def sample_create_channel_connection(): flattened_params = [parent, channel_connection, channel_connection_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2160,11 +2204,13 @@ async def sample_create_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2188,14 +2234,15 @@ async def sample_create_channel_connection(): # Done; return the response. return response - async def delete_channel_connection(self, - request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_channel_connection( + self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Delete a single ChannelConnection. .. 
code-block:: python @@ -2264,8 +2311,7 @@ async def sample_delete_channel_connection(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2283,11 +2329,13 @@ async def sample_delete_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2311,14 +2359,15 @@ async def sample_delete_channel_connection(): # Done; return the response. return response - async def get_google_channel_config(self, - request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> google_channel_config.GoogleChannelConfig: + async def get_google_channel_config( + self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig .. code-block:: python @@ -2384,8 +2433,7 @@ async def sample_get_google_channel_config(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2403,11 +2451,13 @@ async def sample_get_google_channel_config(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2423,15 +2473,16 @@ async def sample_get_google_channel_config(): # Done; return the response. 
return response - async def update_google_channel_config(self, - request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, - *, - google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gce_google_channel_config.GoogleChannelConfig: + async def update_google_channel_config( + self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig .. code-block:: python @@ -2507,8 +2558,7 @@ async def sample_update_google_channel_config(): flattened_params = [google_channel_config, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2528,11 +2578,13 @@ async def sample_update_google_channel_config(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("google_channel_config.name", request.google_channel_config.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2585,17 +2637,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2637,17 +2690,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2693,16 +2747,18 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -2744,16 +2800,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def set_iam_policy( self, @@ -2858,17 +2916,18 @@ async def set_iam_policy( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2977,17 +3036,18 @@ async def get_iam_policy( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3034,17 +3094,18 @@ async def test_iam_permissions( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3086,17 +3147,18 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3138,17 +3200,18 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3159,12 +3222,11 @@ async def __aenter__(self) -> "EventarcAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "EventarcAsyncClient", -) +__all__ = ("EventarcAsyncClient",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 3accde24c6..50506bec8d 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.eventarc_v1 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -61,10 +74,10 @@ from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger -from google.cloud.location import 
locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -80,14 +93,16 @@ class EventarcClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] _transport_registry["grpc"] = EventarcGrpcTransport _transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport _transport_registry["rest"] = EventarcRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[EventarcTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[EventarcTransport]: """Returns an appropriate transport class. Args: @@ -126,9 +141,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -137,16 +150,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "eventarc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "eventarc.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -160,21 +172,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -209,7 +219,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): EventarcClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -226,67 +237,127 @@ def transport(self) -> EventarcTransport: return self._transport @staticmethod - def channel_path(project: str,location: str,channel: str,) -> str: + def channel_path( + project: str, + location: str, + channel: str, + ) -> str: """Returns a fully-qualified channel string.""" - return "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + return "projects/{project}/locations/{location}/channels/{channel}".format( + project=project, + location=location, + channel=channel, + ) @staticmethod - def parse_channel_path(path: str) -> Dict[str,str]: + def parse_channel_path( + path: str, + ) -> Dict[str, str]: """Parses a channel path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channels/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def channel_connection_path(project: str,location: str,channel_connection: str,) -> str: + def channel_connection_path( + project: str, + location: str, + channel_connection: str, + ) -> str: """Returns a fully-qualified channel_connection string.""" - return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format( + project=project, + location=location, + channel_connection=channel_connection, + ) @staticmethod - def parse_channel_connection_path(path: str) -> Dict[str,str]: + def parse_channel_connection_path( + path: str, + ) -> Dict[str, str]: """Parses a channel_connection path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channelConnections/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def cloud_function_path(project: str,location: str,function: str,) -> str: + def cloud_function_path( + project: str, + location: str, + function: str, + ) -> str: """Returns a fully-qualified cloud_function string.""" - return "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + return "projects/{project}/locations/{location}/functions/{function}".format( + project=project, + location=location, + function=function, + ) @staticmethod - def parse_cloud_function_path(path: str) -> Dict[str,str]: + def parse_cloud_function_path( + path: str, + ) -> Dict[str, str]: """Parses a cloud_function path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/functions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: """Returns a fully-qualified crypto_key string.""" - return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) @staticmethod - def parse_crypto_key_path(path: str) -> Dict[str,str]: + def 
parse_crypto_key_path( + path: str, + ) -> Dict[str, str]: """Parses a crypto_key path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def google_channel_config_path(project: str,location: str,) -> str: + def google_channel_config_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified google_channel_config string.""" - return "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + return "projects/{project}/locations/{location}/googleChannelConfig".format( + project=project, + location=location, + ) @staticmethod - def parse_google_channel_config_path(path: str) -> Dict[str,str]: + def parse_google_channel_config_path( + path: str, + ) -> Dict[str, str]: """Parses a google_channel_config path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/googleChannelConfig$", path) return m.groupdict() if m else {} @staticmethod - def provider_path(project: str,location: str,provider: str,) -> str: + def provider_path( + project: str, + location: str, + provider: str, + ) -> str: """Returns a fully-qualified provider string.""" - return "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + return "projects/{project}/locations/{location}/providers/{provider}".format( + project=project, + location=location, + provider=provider, + ) @staticmethod - def parse_provider_path(path: str) -> Dict[str,str]: + def parse_provider_path( + path: str, + ) -> Dict[str, str]: """Parses a provider path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/providers/(?P.+?)$", path) return m.groupdict() if m else {} @@ -297,101 +368,166 @@ def service_path() -> str: return "*".format() @staticmethod - def parse_service_path(path: str) -> Dict[str,str]: + def parse_service_path( + path: str, + ) -> Dict[str, str]: """Parses a service path into its component segments.""" m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod - def service_account_path(project: str,service_account: str,) -> str: + def service_account_path( + project: str, + service_account: str, + ) -> str: """Returns a fully-qualified service_account string.""" - return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + return "projects/{project}/serviceAccounts/{service_account}".format( + project=project, + service_account=service_account, + ) @staticmethod - def parse_service_account_path(path: str) -> Dict[str,str]: + def parse_service_account_path( + path: str, + ) -> Dict[str, str]: """Parses a service_account path into its component segments.""" m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def trigger_path(project: str,location: str,trigger: str,) -> str: + def trigger_path( + project: str, + location: str, + trigger: str, + ) -> str: """Returns a fully-qualified trigger string.""" - return "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + return "projects/{project}/locations/{location}/triggers/{trigger}".format( + project=project, + location=location, + trigger=trigger, + ) @staticmethod - def parse_trigger_path(path: str) -> Dict[str,str]: + def parse_trigger_path( + path: str, + 
) -> Dict[str, str]: """Parses a trigger path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/triggers/(?P<trigger>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def workflow_path(project: str,location: str,workflow: str,) -> str: + def workflow_path( + project: str, + location: str, + workflow: str, + ) -> str: """Returns a fully-qualified workflow string.""" - return "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + return "projects/{project}/locations/{location}/workflows/{workflow}".format( + project=project, + location=location, + workflow=workflow, + ) @staticmethod - def parse_workflow_path(path: str) -> Dict[str,str]: + def parse_workflow_path( + path: str, + ) -> Dict[str, str]: """Parses a workflow path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str,
+ location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -423,8 +559,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = EventarcClient._use_client_cert_effective() @@ -554,7 +692,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -594,12 +732,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the eventarc client. Args: @@ -659,12 +799,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = EventarcClient._read_environment_variables() self._client_cert_source = EventarcClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = EventarcClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None  # updated below, depending on `transport` # Initialize the universe domain validation.
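The path helpers reflowed above are plain string utilities, so their contract is easy to pin down. A minimal round-trip sketch, assuming only the generated EventarcClient from this file:

from google.cloud.eventarc_v1 import EventarcClient

# Build a fully-qualified resource name from its segments...
path = EventarcClient.trigger_path("my-project", "us-central1", "my-trigger")
assert path == "projects/my-project/locations/us-central1/triggers/my-trigger"

# ...and recover the segments from the name.
assert EventarcClient.parse_trigger_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "trigger": "my-trigger",
}

# A non-matching path parses to an empty dict rather than raising.
assert EventarcClient.parse_trigger_path("not/a/trigger") == {}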
self._is_universe_domain_valid = False @@ -684,22 +824,22 @@ def __init__(self, *, if transport_provided: # transport is a EventarcTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(EventarcTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - EventarcClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or EventarcClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -729,25 +869,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.eventarc_v1.EventarcClient`.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.eventarc.v1.Eventarc", "credentialsType": None, - } + }, ) - def get_trigger(self, - request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> trigger.Trigger: + def get_trigger( + self, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> trigger.Trigger: r"""Get a single trigger. .. code-block:: python @@ -807,8 +950,7 @@ def sample_get_trigger(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -825,11 +967,13 @@ def sample_get_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
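The # fmt: off / # fmt: on pragmas added around every routing-header block (first visible in the get_trigger hunk above) tell Black to leave the enclosed lines exactly as the generator emitted them, so the nested tuple keeps its shape across formatting passes. An isolated, runnable sketch of the guarded construct, with a hypothetical trigger name:

from google.api_core import gapic_v1

metadata = ()  # whatever per-call metadata the caller supplied

# fmt: off
metadata = tuple(metadata) + (
    gapic_v1.routing_header.to_grpc_metadata((
        ("name", "projects/my-project/locations/us-central1/triggers/my-trigger"),
    )),
)
# fmt: on
# The appended entry is a single ("x-goog-request-params", ...) header whose
# value is the URL-encoded key/value list. Without the pragmas, a formatter
# would reflow the nesting and the next regeneration would produce noisy diffs.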
self._validate_universe_domain() @@ -845,14 +989,15 @@ def sample_get_trigger(): # Done; return the response. return response - def list_triggers(self, - request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListTriggersPager: + def list_triggers( + self, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTriggersPager: r"""List triggers. .. code-block:: python @@ -915,8 +1060,7 @@ def sample_list_triggers(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -933,11 +1077,13 @@ def sample_list_triggers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -964,16 +1110,17 @@ def sample_list_triggers(): # Done; return the response. return response - def create_trigger(self, - request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, - *, - parent: Optional[str] = None, - trigger: Optional[gce_trigger.Trigger] = None, - trigger_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_trigger( + self, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, + *, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new trigger in a particular project and location. @@ -1063,8 +1210,7 @@ def sample_create_trigger(): flattened_params = [parent, trigger, trigger_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1085,11 +1231,13 @@ def sample_create_trigger(): # Certain fields should be provided within the metadata header; # add these here. 
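As the guards above show, the request message and the flattened field arguments are mutually exclusive. Both supported call styles, sketched with a hypothetical trigger name:

from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient()
name = "projects/my-project/locations/us-central1/triggers/my-trigger"

# Style 1: an explicit request message.
trigger = client.get_trigger(request=eventarc_v1.GetTriggerRequest(name=name))

# Style 2: the flattened convenience argument.
trigger = client.get_trigger(name=name)

# Passing both raises ValueError, per the check shown above.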
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1113,16 +1261,17 @@ def sample_create_trigger(): # Done; return the response. return response - def update_trigger(self, - request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, - *, - trigger: Optional[gce_trigger.Trigger] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_trigger( + self, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, + *, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Update a single trigger. .. code-block:: python @@ -1204,8 +1353,7 @@ def sample_update_trigger(): flattened_params = [trigger, update_mask, allow_missing] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1226,11 +1374,13 @@ def sample_update_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("trigger.name", request.trigger.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1254,15 +1404,16 @@ def sample_update_trigger(): # Done; return the response. return response - def delete_trigger(self, - request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, - *, - name: Optional[str] = None, - allow_missing: Optional[bool] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_trigger( + self, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, + *, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single trigger. .. 
code-block:: python @@ -1338,8 +1489,7 @@ def sample_delete_trigger(): flattened_params = [name, allow_missing] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1358,11 +1508,13 @@ def sample_delete_trigger(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1386,14 +1538,15 @@ def sample_delete_trigger(): # Done; return the response. return response - def get_channel(self, - request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel.Channel: + def get_channel( + self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel.Channel: r"""Get a single Channel. .. code-block:: python @@ -1459,8 +1612,7 @@ def sample_get_channel(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1477,11 +1629,13 @@ def sample_get_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1497,14 +1651,15 @@ def sample_get_channel(): # Done; return the response. return response - def list_channels(self, - request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelsPager: + def list_channels( + self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelsPager: r"""List channels. .. 
code-block:: python @@ -1567,8 +1722,7 @@ def sample_list_channels(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1585,11 +1739,13 @@ def sample_list_channels(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1616,16 +1772,17 @@ def sample_list_channels(): # Done; return the response. return response - def create_channel(self, - request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel: Optional[gce_channel.Channel] = None, - channel_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_channel( + self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new channel in a particular project and location. @@ -1715,8 +1872,7 @@ def sample_create_channel(): flattened_params = [parent, channel, channel_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1737,11 +1893,13 @@ def sample_create_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1765,15 +1923,16 @@ def sample_create_channel(): # Done; return the response. 
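The create_*, update_*, and delete_* RPCs in this file return a google.api_core.operation.Operation wrapper rather than the resource itself; callers typically block on the long-running operation. A sketch of the usual pattern, with hypothetical resource names:

from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient()
lro = client.create_channel(
    parent="projects/my-project/locations/us-central1",
    channel=eventarc_v1.Channel(
        provider="projects/my-project/locations/us-central1/providers/datadog",  # hypothetical provider
    ),
    channel_id="my-channel",
)
channel = lro.result(timeout=300)  # blocks until the server-side operation finishes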
return response - def update_channel(self, - request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, - *, - channel: Optional[gce_channel.Channel] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_channel( + self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Update a single channel. .. code-block:: python @@ -1850,8 +2009,7 @@ def sample_update_channel(): flattened_params = [channel, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1870,11 +2028,13 @@ def sample_update_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("channel.name", request.channel.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1898,14 +2058,15 @@ def sample_update_channel(): # Done; return the response. return response - def delete_channel(self, - request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_channel( + self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single channel. .. code-block:: python @@ -1976,8 +2137,7 @@ def sample_delete_channel(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1994,11 +2154,13 @@ def sample_delete_channel(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._validate_universe_domain() @@ -2022,14 +2184,15 @@ def sample_delete_channel(): # Done; return the response. return response - def get_provider(self, - request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> discovery.Provider: + def get_provider( + self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> discovery.Provider: r"""Get a single Provider. .. code-block:: python @@ -2089,8 +2252,7 @@ def sample_get_provider(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2107,11 +2269,13 @@ def sample_get_provider(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2127,14 +2291,15 @@ def sample_get_provider(): # Done; return the response. return response - def list_providers(self, - request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListProvidersPager: + def list_providers( + self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListProvidersPager: r"""List providers. .. code-block:: python @@ -2197,8 +2362,7 @@ def sample_list_providers(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2215,11 +2379,13 @@ def sample_list_providers(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
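list_providers, like the other list_* RPCs in this file, returns a pager that lazily issues follow-up requests as it is consumed. A sketch of item- and page-level iteration, with a hypothetical parent:

from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient()
parent = "projects/my-project/locations/us-central1"

# Item-level iteration; the pager issues follow-up ListProviders calls on demand.
for provider in client.list_providers(parent=parent):
    print(provider.display_name)

# Page-level iteration is also available via the `pages` property.
for page in client.list_providers(parent=parent).pages:
    print(len(page.providers))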
self._validate_universe_domain() @@ -2246,14 +2412,15 @@ def sample_list_providers(): # Done; return the response. return response - def get_channel_connection(self, - request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> channel_connection.ChannelConnection: + def get_channel_connection( + self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. .. code-block:: python @@ -2318,8 +2485,7 @@ def sample_get_channel_connection(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2336,11 +2502,13 @@ def sample_get_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2356,14 +2524,15 @@ def sample_get_channel_connection(): # Done; return the response. return response - def list_channel_connections(self, - request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListChannelConnectionsPager: + def list_channel_connections( + self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListChannelConnectionsPager: r"""List channel connections. .. code-block:: python @@ -2427,8 +2596,7 @@ def sample_list_channel_connections(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2445,11 +2613,13 @@ def sample_list_channel_connections(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2476,16 +2646,17 @@ def sample_list_channel_connections(): # Done; return the response. return response - def create_channel_connection(self, - request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, - *, - parent: Optional[str] = None, - channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, - channel_connection_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_channel_connection( + self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Create a new ChannelConnection in a particular project and location. @@ -2575,8 +2746,7 @@ def sample_create_channel_connection(): flattened_params = [parent, channel_connection, channel_connection_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2597,11 +2767,13 @@ def sample_create_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2625,14 +2797,15 @@ def sample_create_channel_connection(): # Done; return the response. return response - def delete_channel_connection(self, - request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_channel_connection( + self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Delete a single ChannelConnection. .. 
code-block:: python @@ -2701,8 +2874,7 @@ def sample_delete_channel_connection(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2719,11 +2891,13 @@ def sample_delete_channel_connection(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2747,14 +2921,15 @@ def sample_delete_channel_connection(): # Done; return the response. return response - def get_google_channel_config(self, - request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> google_channel_config.GoogleChannelConfig: + def get_google_channel_config( + self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig .. code-block:: python @@ -2820,8 +2995,7 @@ def sample_get_google_channel_config(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2838,11 +3012,13 @@ def sample_get_google_channel_config(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2858,15 +3034,16 @@ def sample_get_google_channel_config(): # Done; return the response. 
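The update_* RPCs shown earlier (update_trigger, update_channel, and update_google_channel_config just below) pair the modified resource with a field_mask_pb2.FieldMask that names the fields to write. A sketch with hypothetical values:

from google.protobuf import field_mask_pb2
from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient()
lro = client.update_trigger(
    trigger=eventarc_v1.Trigger(
        name="projects/my-project/locations/us-central1/triggers/my-trigger",
        service_account="runner@my-project.iam.gserviceaccount.com",  # hypothetical value
    ),
    update_mask=field_mask_pb2.FieldMask(paths=["service_account"]),  # only this field is written
)
updated = lro.result()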
return response - def update_google_channel_config(self, - request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, - *, - google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> gce_google_channel_config.GoogleChannelConfig: + def update_google_channel_config( + self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig .. code-block:: python @@ -2942,8 +3119,7 @@ def sample_update_google_channel_config(): flattened_params = [google_channel_config, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2962,11 +3138,13 @@ def sample_update_google_channel_config(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("google_channel_config.name", request.google_channel_config.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3032,10 +3210,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3043,7 +3218,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3088,10 +3267,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3099,7 +3275,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3148,16 +3328,18 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -3199,16 +3381,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def set_iam_policy( self, @@ -3313,10 +3497,7 @@ def set_iam_policy( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3324,7 +3505,11 @@ def set_iam_policy( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3436,10 +3621,7 @@ def get_iam_policy( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3447,7 +3629,11 @@ def get_iam_policy( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3497,10 +3683,7 @@ def test_iam_permissions( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3508,7 +3691,11 @@ def test_iam_permissions( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3553,10 +3740,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. 
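The operations and IAM mixins above differ from the generated RPCs in two ways visible in this diff: they accept only a protobuf request message (no flattened fields), and their routing-header line is collapsed onto a single line rather than wrapped in fmt pragmas. A usage sketch, with hypothetical names and a permission string that is an assumption rather than taken from this diff:

from google.longrunning import operations_pb2
from google.iam.v1 import iam_policy_pb2
from google.cloud import eventarc_v1

client = eventarc_v1.EventarcClient()

op = client.get_operation(
    operations_pb2.GetOperationRequest(
        name="projects/my-project/locations/us-central1/operations/op-123",  # hypothetical
    )
)
resp = client.test_iam_permissions(
    iam_policy_pb2.TestIamPermissionsRequest(
        resource="projects/my-project/locations/us-central1/triggers/my-trigger",
        permissions=["eventarc.triggers.get"],  # assumed permission string
    )
)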
self._validate_universe_domain() @@ -3564,7 +3748,11 @@ def get_location( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3609,10 +3797,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -3620,7 +3805,11 @@ def list_locations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3634,6 +3823,4 @@ def list_locations( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "EventarcClient", -) +__all__ = ("EventarcClient",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 0b116f59f3..e46209c0d2 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -48,14 +49,17 @@ class ListTriggersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., eventarc.ListTriggersResponse], - request: eventarc.ListTriggersRequest, - response: eventarc.ListTriggersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListTriggersResponse], + request: eventarc.ListTriggersRequest, + response: eventarc.ListTriggersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -96,7 +100,7 @@ def __iter__(self) -> Iterator[trigger.Trigger]: yield from page.triggers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListTriggersAsyncPager: @@ -116,14 +120,17 @@ class ListTriggersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListTriggersResponse]], - request: eventarc.ListTriggersRequest, - response: eventarc.ListTriggersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListTriggersResponse]], + request: eventarc.ListTriggersRequest, + response: eventarc.ListTriggersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -158,6 +165,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListTriggersResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[trigger.Trigger]: async def async_generator(): async for page in self.pages: @@ -167,7 +175,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelsPager: @@ -187,14 +195,17 @@ class ListChannelsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., eventarc.ListChannelsResponse], - request: eventarc.ListChannelsRequest, - response: eventarc.ListChannelsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListChannelsResponse], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -235,7 +246,7 @@ def __iter__(self) -> Iterator[channel.Channel]: yield from page.channels def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelsAsyncPager: @@ -255,14 +266,17 @@ class ListChannelsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], - request: eventarc.ListChannelsRequest, - response: eventarc.ListChannelsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -297,6 +311,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[channel.Channel]: async def async_generator(): async for page in self.pages: @@ -306,7 +321,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListProvidersPager: @@ -326,14 +341,17 @@ class ListProvidersPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., eventarc.ListProvidersResponse], - request: eventarc.ListProvidersRequest, - response: eventarc.ListProvidersResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListProvidersResponse], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -374,7 +392,7 @@ def __iter__(self) -> Iterator[discovery.Provider]: yield from page.providers def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListProvidersAsyncPager: @@ -394,14 +412,17 @@ class ListProvidersAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], - request: eventarc.ListProvidersRequest, - response: eventarc.ListProvidersResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -436,6 +457,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListProvidersResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[discovery.Provider]: async def async_generator(): async for page in self.pages: @@ -445,7 +467,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelConnectionsPager: @@ -465,14 +487,17 @@ class ListChannelConnectionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., eventarc.ListChannelConnectionsResponse], - request: eventarc.ListChannelConnectionsRequest, - response: eventarc.ListChannelConnectionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., eventarc.ListChannelConnectionsResponse], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -513,7 +538,7 @@ def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: yield from page.channel_connections def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListChannelConnectionsAsyncPager: @@ -533,14 +558,17 @@ class ListChannelConnectionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], - request: eventarc.ListChannelConnectionsRequest, - response: eventarc.ListChannelConnectionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
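# The async pagers in this file all share one shape: an async ``pages``
# generator that follows ``next_page_token``, plus an ``__aiter__`` that
# flattens pages into individual items. A minimal standalone sketch of
# that pattern follows; every name in it is illustrative, not part of
# the generated Eventarc surface.
import asyncio
from typing import AsyncIterator, Awaitable, Callable, List


class FakePage:
    def __init__(self, items: List[str], next_page_token: str):
        self.items = items
        self.next_page_token = next_page_token


class ToyAsyncPager:
    def __init__(self, method: Callable[[str], Awaitable[FakePage]], response: FakePage):
        self._method = method
        self._response = response

    @property
    async def pages(self) -> AsyncIterator[FakePage]:
        # Yield the page we already have, then follow the token chain.
        yield self._response
        while self._response.next_page_token:
            self._response = await self._method(self._response.next_page_token)
            yield self._response

    def __aiter__(self) -> AsyncIterator[str]:
        async def async_generator():
            async for page in self.pages:
                for item in page.items:
                    yield item
        return async_generator()


async def _demo() -> None:
    store = {"": FakePage(["a", "b"], "t1"), "t1": FakePage(["c"], "")}

    async def fetch(token: str) -> FakePage:
        return store[token]

    print([item async for item in ToyAsyncPager(fetch, store[""])])  # ['a', 'b', 'c']

asyncio.run(_demo())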
Args: @@ -575,6 +603,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelConnectionsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[channel_connection.ChannelConnection]: async def async_generator(): async for page in self.pages: @@ -584,4 +613,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py index c6c13c6a4c..0fb467a638 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -25,14 +25,14 @@ # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] -_transport_registry['grpc'] = EventarcGrpcTransport -_transport_registry['grpc_asyncio'] = EventarcGrpcAsyncIOTransport -_transport_registry['rest'] = EventarcRestTransport +_transport_registry["grpc"] = EventarcGrpcTransport +_transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport +_transport_registry["rest"] = EventarcRestTransport __all__ = ( - 'EventarcTransport', - 'EventarcGrpcTransport', - 'EventarcGrpcAsyncIOTransport', - 'EventarcRestTransport', - 'EventarcRestInterceptor', + "EventarcTransport", + "EventarcGrpcTransport", + "EventarcGrpcAsyncIOTransport", + "EventarcRestTransport", + "EventarcRestInterceptor", ) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index ca6cf24f12..d7fb089d42 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -25,7 +25,7 @@ from google.api_core import retry as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.cloud.eventarc_v1.types import channel @@ -35,10 +35,10 @@ from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -49,29 +49,32 @@ class EventarcTransport(abc.ABC): """Abstract transport class for Eventarc.""" + # fmt: off AUTH_SCOPES = 
( - 'https://www.googleapis.com/auth/cloud-platform', + "https://www.googleapis.com/auth/cloud-platform", ) + # fmt: on - DEFAULT_HOST: str = 'eventarc.googleapis.com' + DEFAULT_HOST: str = "eventarc.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'eventarc.googleapis.com'). + The hostname to connect to (default: "eventarc.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -107,10 +110,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -118,15 +121,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -271,12 +278,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
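# The constructor above appends the default HTTPS port when the caller
# passes a bare hostname. That normalization in isolation (a sketch,
# using a throwaway helper name):
def _normalize_host(host: str) -> str:
    # Default to port 443 (HTTPS) if no port is specified.
    if ":" not in host:
        host += ":443"
    return host

assert _normalize_host("eventarc.googleapis.com") == "eventarc.googleapis.com:443"
assert _normalize_host("localhost:8080") == "localhost:8080"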
""" @@ -287,167 +294,257 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + # fmt: off @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - Union[ - trigger.Trigger, - Awaitable[trigger.Trigger] - ]]: + def get_trigger( + self, + ) -> Callable[ + [eventarc.GetTriggerRequest], + Union[ + trigger.Trigger, + Awaitable[trigger.Trigger] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - Union[ - eventarc.ListTriggersResponse, - Awaitable[eventarc.ListTriggersResponse] - ]]: + def list_triggers( + self, + ) -> Callable[ + [eventarc.ListTriggersRequest], + Union[ + eventarc.ListTriggersResponse, + Awaitable[eventarc.ListTriggersResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_trigger( + self, + ) -> Callable[ + [eventarc.CreateTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_trigger( + self, + ) -> Callable[ + [eventarc.UpdateTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_trigger( + self, + ) -> Callable[ + [eventarc.DeleteTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - Union[ - channel.Channel, - Awaitable[channel.Channel] - ]]: + def get_channel( + self, + ) -> Callable[ + [eventarc.GetChannelRequest], + Union[ + channel.Channel, + Awaitable[channel.Channel] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - Union[ - eventarc.ListChannelsResponse, - Awaitable[eventarc.ListChannelsResponse] - ]]: + def list_channels( + self, + ) -> Callable[ + [eventarc.ListChannelsRequest], + Union[ + eventarc.ListChannelsResponse, + Awaitable[eventarc.ListChannelsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_channel_( + self, + ) -> Callable[ + [eventarc.CreateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_channel( + self, + ) -> Callable[ + [eventarc.UpdateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() 
+ # fmt: on + # fmt: off @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_channel( + self, + ) -> Callable[ + [eventarc.DeleteChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - Union[ - discovery.Provider, - Awaitable[discovery.Provider] - ]]: + def get_provider( + self, + ) -> Callable[ + [eventarc.GetProviderRequest], + Union[ + discovery.Provider, + Awaitable[discovery.Provider] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - Union[ - eventarc.ListProvidersResponse, - Awaitable[eventarc.ListProvidersResponse] - ]]: + def list_providers( + self, + ) -> Callable[ + [eventarc.ListProvidersRequest], + Union[ + eventarc.ListProvidersResponse, + Awaitable[eventarc.ListProvidersResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - Union[ - channel_connection.ChannelConnection, - Awaitable[channel_connection.ChannelConnection] - ]]: + def get_channel_connection( + self, + ) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Union[ + channel_connection.ChannelConnection, + Awaitable[channel_connection.ChannelConnection] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - Union[ - eventarc.ListChannelConnectionsResponse, - Awaitable[eventarc.ListChannelConnectionsResponse] - ]]: + def list_channel_connections( + self, + ) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Union[ + eventarc.ListChannelConnectionsResponse, + Awaitable[eventarc.ListChannelConnectionsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_channel_connection( + self, + ) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_channel_connection( + self, + ) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - Union[ - google_channel_config.GoogleChannelConfig, - Awaitable[google_channel_config.GoogleChannelConfig] - ]]: + def get_google_channel_config( + self, + ) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Union[ + google_channel_config.GoogleChannelConfig, + Awaitable[google_channel_config.GoogleChannelConfig] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_google_channel_config(self) -> Callable[ - 
[eventarc.UpdateGoogleChannelConfigRequest], - Union[ - gce_google_channel_config.GoogleChannelConfig, - Awaitable[gce_google_channel_config.GoogleChannelConfig] - ]]: + def update_google_channel_config( + self, + ) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Union[ + gce_google_channel_config.GoogleChannelConfig, + Awaitable[gce_google_channel_config.GoogleChannelConfig] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -461,46 +558,31 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: raise NotImplementedError() @property def set_iam_policy( self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]]]: raise NotImplementedError() @property @@ -516,19 +598,15 @@ def test_iam_permissions( raise NotImplementedError() @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]]]: raise NotImplementedError() @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]]]: raise NotImplementedError() @property @@ -536,6 +614,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'EventarcTransport', -) +__all__ = ("EventarcTransport",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 887700548d..e0f04d365e 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -22,7 +22,7 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from 
google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -38,14 +38,15 @@ from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -65,10 +66,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -76,7 +79,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -102,7 +105,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": client_call_details.method, "response": grpc_response, @@ -126,28 +129,31 @@ class EventarcGrpcTransport(EventarcTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
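# The ``# fmt: off`` block in the logging interceptor normalizes gRPC
# metadata, whose values may arrive as bytes, into JSON-safe strings
# before logging. The comprehension in isolation, over made-up metadata:
request_metadata = [
    ("x-goog-request-params", b"name=projects/demo"),  # bytes value
    ("user-agent", "gapic/1.0"),                       # str value
]
decoded = {
    key: value.decode("utf-8") if isinstance(value, bytes) else value
    for key, value in request_metadata
}
print(decoded)  # {'x-goog-request-params': 'name=projects/demo', 'user-agent': 'gapic/1.0'}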
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'eventarc.googleapis.com'). + The hostname to connect to (default: "eventarc.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -222,7 +228,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -231,7 +238,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -266,19 +274,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -314,13 +324,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -332,17 +341,15 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - trigger.Trigger]: + def get_trigger( + self, + ) -> Callable[[eventarc.GetTriggerRequest], trigger.Trigger]: r"""Return a callable for the get trigger method over gRPC. Get a single trigger. @@ -357,18 +364,18 @@ def get_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + if "get_trigger" not in self._stubs: + self._stubs["get_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetTrigger", request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, ) - return self._stubs['get_trigger'] + return self._stubs["get_trigger"] @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - eventarc.ListTriggersResponse]: + def list_triggers( + self, + ) -> Callable[[eventarc.ListTriggersRequest], eventarc.ListTriggersResponse]: r"""Return a callable for the list triggers method over gRPC. List triggers. @@ -383,18 +390,18 @@ def list_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
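# Every stub property below follows the same lazy-cache idiom: build the
# unary-unary callable once, key it by method name in ``self._stubs``,
# and hand back the cached entry afterwards. The idiom in isolation,
# with a stand-in channel rather than the real grpc API:
from typing import Callable, Dict


class FakeChannel:
    def unary_unary(self, path: str) -> Callable[[str], str]:
        return lambda request: f"{path} <- {request}"


class ToyStubs:
    def __init__(self, channel: FakeChannel):
        self._channel = channel
        self._stubs: Dict[str, Callable[[str], str]] = {}

    @property
    def get_trigger(self) -> Callable[[str], str]:
        # Create the stub on first access only; later lookups hit the cache.
        if "get_trigger" not in self._stubs:
            self._stubs["get_trigger"] = self._channel.unary_unary(
                "/toy.Eventarc/GetTrigger",
            )
        return self._stubs["get_trigger"]


toy = ToyStubs(FakeChannel())
assert toy.get_trigger is toy.get_trigger  # the cached stub is reused
print(toy.get_trigger("request"))          # /toy.Eventarc/GetTrigger <- request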
- if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + if "list_triggers" not in self._stubs: + self._stubs["list_triggers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListTriggers", request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, ) - return self._stubs['list_triggers'] + return self._stubs["list_triggers"] @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - operations_pb2.Operation]: + def create_trigger( + self, + ) -> Callable[[eventarc.CreateTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the create trigger method over gRPC. Create a new trigger in a particular project and @@ -410,18 +417,18 @@ def create_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + if "create_trigger" not in self._stubs: + self._stubs["create_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateTrigger", request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_trigger'] + return self._stubs["create_trigger"] @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - operations_pb2.Operation]: + def update_trigger( + self, + ) -> Callable[[eventarc.UpdateTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the update trigger method over gRPC. Update a single trigger. @@ -436,18 +443,18 @@ def update_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + if "update_trigger" not in self._stubs: + self._stubs["update_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateTrigger", request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_trigger'] + return self._stubs["update_trigger"] @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - operations_pb2.Operation]: + def delete_trigger( + self, + ) -> Callable[[eventarc.DeleteTriggerRequest], operations_pb2.Operation]: r"""Return a callable for the delete trigger method over gRPC. Delete a single trigger. @@ -462,18 +469,18 @@ def delete_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + if "delete_trigger" not in self._stubs: + self._stubs["delete_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteTrigger", request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_trigger'] + return self._stubs["delete_trigger"] @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - channel.Channel]: + def get_channel( + self, + ) -> Callable[[eventarc.GetChannelRequest], channel.Channel]: r"""Return a callable for the get channel method over gRPC. Get a single Channel. @@ -488,18 +495,18 @@ def get_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannel', + if "get_channel" not in self._stubs: + self._stubs["get_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannel", request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, ) - return self._stubs['get_channel'] + return self._stubs["get_channel"] @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - eventarc.ListChannelsResponse]: + def list_channels( + self, + ) -> Callable[[eventarc.ListChannelsRequest], eventarc.ListChannelsResponse]: r"""Return a callable for the list channels method over gRPC. List channels. @@ -514,18 +521,18 @@ def list_channels(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannels', + if "list_channels" not in self._stubs: + self._stubs["list_channels"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannels", request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, ) - return self._stubs['list_channels'] + return self._stubs["list_channels"] @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - operations_pb2.Operation]: + def create_channel_( + self, + ) -> Callable[[eventarc.CreateChannelRequest], operations_pb2.Operation]: r"""Return a callable for the create channel method over gRPC. Create a new channel in a particular project and @@ -541,18 +548,18 @@ def create_channel_(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + if "create_channel_" not in self._stubs: + self._stubs["create_channel_"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannel", request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_'] + return self._stubs["create_channel_"] @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - operations_pb2.Operation]: + def update_channel( + self, + ) -> Callable[[eventarc.UpdateChannelRequest], operations_pb2.Operation]: r"""Return a callable for the update channel method over gRPC. Update a single channel. @@ -567,18 +574,18 @@ def update_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + if "update_channel" not in self._stubs: + self._stubs["update_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateChannel", request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_channel'] + return self._stubs["update_channel"] @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - operations_pb2.Operation]: + def delete_channel( + self, + ) -> Callable[[eventarc.DeleteChannelRequest], operations_pb2.Operation]: r"""Return a callable for the delete channel method over gRPC. Delete a single channel. @@ -593,18 +600,18 @@ def delete_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + if "delete_channel" not in self._stubs: + self._stubs["delete_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannel", request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel'] + return self._stubs["delete_channel"] @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - discovery.Provider]: + def get_provider( + self, + ) -> Callable[[eventarc.GetProviderRequest], discovery.Provider]: r"""Return a callable for the get provider method over gRPC. Get a single Provider. @@ -619,18 +626,18 @@ def get_provider(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetProvider', + if "get_provider" not in self._stubs: + self._stubs["get_provider"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetProvider", request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, ) - return self._stubs['get_provider'] + return self._stubs["get_provider"] @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - eventarc.ListProvidersResponse]: + def list_providers( + self, + ) -> Callable[[eventarc.ListProvidersRequest], eventarc.ListProvidersResponse]: r"""Return a callable for the list providers method over gRPC. List providers. @@ -645,18 +652,18 @@ def list_providers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListProviders', + if "list_providers" not in self._stubs: + self._stubs["list_providers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListProviders", request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, ) - return self._stubs['list_providers'] + return self._stubs["list_providers"] @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - channel_connection.ChannelConnection]: + def get_channel_connection( + self, + ) -> Callable[[eventarc.GetChannelConnectionRequest], channel_connection.ChannelConnection]: r"""Return a callable for the get channel connection method over gRPC. Get a single ChannelConnection. @@ -671,18 +678,18 @@ def get_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + if "get_channel_connection" not in self._stubs: + self._stubs["get_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannelConnection", request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, ) - return self._stubs['get_channel_connection'] + return self._stubs["get_channel_connection"] @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - eventarc.ListChannelConnectionsResponse]: + def list_channel_connections( + self, + ) -> Callable[[eventarc.ListChannelConnectionsRequest], eventarc.ListChannelConnectionsResponse]: r"""Return a callable for the list channel connections method over gRPC. List channel connections. @@ -697,18 +704,18 @@ def list_channel_connections(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + if "list_channel_connections" not in self._stubs: + self._stubs["list_channel_connections"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannelConnections", request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, ) - return self._stubs['list_channel_connections'] + return self._stubs["list_channel_connections"] @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - operations_pb2.Operation]: + def create_channel_connection( + self, + ) -> Callable[[eventarc.CreateChannelConnectionRequest], operations_pb2.Operation]: r"""Return a callable for the create channel connection method over gRPC. Create a new ChannelConnection in a particular @@ -724,18 +731,18 @@ def create_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + if "create_channel_connection" not in self._stubs: + self._stubs["create_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection", request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_connection'] + return self._stubs["create_channel_connection"] @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - operations_pb2.Operation]: + def delete_channel_connection( + self, + ) -> Callable[[eventarc.DeleteChannelConnectionRequest], operations_pb2.Operation]: r"""Return a callable for the delete channel connection method over gRPC. Delete a single ChannelConnection. @@ -750,18 +757,18 @@ def delete_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + if "delete_channel_connection" not in self._stubs: + self._stubs["delete_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection", request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel_connection'] + return self._stubs["delete_channel_connection"] @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - google_channel_config.GoogleChannelConfig]: + def get_google_channel_config( + self, + ) -> Callable[[eventarc.GetGoogleChannelConfigRequest], google_channel_config.GoogleChannelConfig]: r"""Return a callable for the get google channel config method over gRPC. Get a GoogleChannelConfig @@ -776,18 +783,18 @@ def get_google_channel_config(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + if "get_google_channel_config" not in self._stubs: + self._stubs["get_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig", request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['get_google_channel_config'] + return self._stubs["get_google_channel_config"] @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - gce_google_channel_config.GoogleChannelConfig]: + def update_google_channel_config( + self, + ) -> Callable[[eventarc.UpdateGoogleChannelConfigRequest], gce_google_channel_config.GoogleChannelConfig]: r"""Return a callable for the update google channel config method over gRPC. Update a single GoogleChannelConfig @@ -802,13 +809,13 @@ def update_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + if "update_google_channel_config" not in self._stubs: + self._stubs["update_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig", request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['update_google_channel_config'] + return self._stubs["update_google_channel_config"] def close(self): self._logged_channel.close() @@ -817,8 +824,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -835,8 +841,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -853,8 +858,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -871,8 +875,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -889,8 +892,7 @@ def list_operations( def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -907,8 +909,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the get location method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -975,9 +976,7 @@ def get_iam_policy( @property def test_iam_permissions( self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: + ) -> Callable[[iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control policy for a function. 
If the function does not exist, this will @@ -1005,6 +1004,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'EventarcGrpcTransport', -) +__all__ = ("EventarcGrpcTransport",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 6c940b3a82..514c5f0ab2 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -25,13 +25,13 @@ from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.eventarc_v1.types import channel @@ -41,15 +41,16 @@ from google.cloud.eventarc_v1.types import google_channel_config from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import EventarcGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -69,10 +70,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -80,7 +83,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -106,7 +109,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -135,13 +138,15 @@ class EventarcGrpcAsyncIOTransport(EventarcTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: 
Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -172,29 +177,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'eventarc.googleapis.com'). + The hostname to connect to (default: "eventarc.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -269,7 +276,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -278,7 +286,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -338,17 +347,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: """ # Quick check: Only create a new client if we do not already have one. 
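# ``operations_client`` above is a lazily created singleton: construct
# the client on first access, then serve every later access from the
# cached attribute. The same idiom, reduced to its smallest form (the
# stand-in object here replaces the real operations client):
class LazyOps:
    def __init__(self) -> None:
        self._operations_client = None

    @property
    def operations_client(self) -> object:
        # Quick check: only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = object()
        return self._operations_client


ops = LazyOps()
assert ops.operations_client is ops.operations_client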
if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - Awaitable[trigger.Trigger]]: + def get_trigger( + self, + ) -> Callable[[eventarc.GetTriggerRequest], Awaitable[trigger.Trigger]]: r"""Return a callable for the get trigger method over gRPC. Get a single trigger. @@ -363,18 +370,18 @@ def get_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + if "get_trigger" not in self._stubs: + self._stubs["get_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetTrigger", request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, ) - return self._stubs['get_trigger'] + return self._stubs["get_trigger"] @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - Awaitable[eventarc.ListTriggersResponse]]: + def list_triggers( + self, + ) -> Callable[[eventarc.ListTriggersRequest], Awaitable[eventarc.ListTriggersResponse]]: r"""Return a callable for the list triggers method over gRPC. List triggers. @@ -389,18 +396,18 @@ def list_triggers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + if "list_triggers" not in self._stubs: + self._stubs["list_triggers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListTriggers", request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, ) - return self._stubs['list_triggers'] + return self._stubs["list_triggers"] @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def create_trigger( + self, + ) -> Callable[[eventarc.CreateTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create trigger method over gRPC. Create a new trigger in a particular project and @@ -416,18 +423,18 @@ def create_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + if "create_trigger" not in self._stubs: + self._stubs["create_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateTrigger", request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_trigger'] + return self._stubs["create_trigger"] @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def update_trigger( + self, + ) -> Callable[[eventarc.UpdateTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update trigger method over gRPC. Update a single trigger. @@ -442,18 +449,18 @@ def update_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + if "update_trigger" not in self._stubs: + self._stubs["update_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateTrigger", request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_trigger'] + return self._stubs["update_trigger"] @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - Awaitable[operations_pb2.Operation]]: + def delete_trigger( + self, + ) -> Callable[[eventarc.DeleteTriggerRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete trigger method over gRPC. Delete a single trigger. @@ -468,18 +475,18 @@ def delete_trigger(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + if "delete_trigger" not in self._stubs: + self._stubs["delete_trigger"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteTrigger", request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_trigger'] + return self._stubs["delete_trigger"] @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - Awaitable[channel.Channel]]: + def get_channel( + self, + ) -> Callable[[eventarc.GetChannelRequest], Awaitable[channel.Channel]]: r"""Return a callable for the get channel method over gRPC. Get a single Channel. @@ -494,18 +501,18 @@ def get_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
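        # Aside: on the AsyncIO transport each property is annotated as
        # Callable[[Request], Awaitable[Response]] because invoking a
        # grpc.aio stub returns an awaitable rather than the response
        # itself. Minimal typing sketch (hypothetical request/response
        # types, not part of the generated surface):
        from typing import Awaitable, Callable

        async def _fake_rpc(request: str) -> str:
            return request.upper()  # stands in for a server round trip

        rpc: Callable[[str], Awaitable[str]] = _fake_rpc
        # asyncio.run(rpc("ping")) would return "PING".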
- if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannel', + if "get_channel" not in self._stubs: + self._stubs["get_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannel", request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, ) - return self._stubs['get_channel'] + return self._stubs["get_channel"] @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - Awaitable[eventarc.ListChannelsResponse]]: + def list_channels( + self, + ) -> Callable[[eventarc.ListChannelsRequest], Awaitable[eventarc.ListChannelsResponse]]: r"""Return a callable for the list channels method over gRPC. List channels. @@ -520,18 +527,18 @@ def list_channels(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannels', + if "list_channels" not in self._stubs: + self._stubs["list_channels"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannels", request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, ) - return self._stubs['list_channels'] + return self._stubs["list_channels"] @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - Awaitable[operations_pb2.Operation]]: + def create_channel_( + self, + ) -> Callable[[eventarc.CreateChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create channel method over gRPC. Create a new channel in a particular project and @@ -547,18 +554,18 @@ def create_channel_(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + if "create_channel_" not in self._stubs: + self._stubs["create_channel_"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannel", request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_'] + return self._stubs["create_channel_"] @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - Awaitable[operations_pb2.Operation]]: + def update_channel( + self, + ) -> Callable[[eventarc.UpdateChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update channel method over gRPC. Update a single channel. @@ -573,18 +580,18 @@ def update_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
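        # Aside: the RPC property above is named "create_channel_" with a
        # trailing underscore because the transport already exposes a
        # create_channel classmethod (the gRPC channel factory); the
        # generator suffixes RPC names that would otherwise collide.
        # Illustrative sketch of that deconfliction rule (assumed and
        # simplified, not the generator's actual code):
        _RESERVED_TRANSPORT_NAMES = {"create_channel"}

        def _deconflict(rpc_method_name: str) -> str:
            if rpc_method_name in _RESERVED_TRANSPORT_NAMES:
                return rpc_method_name + "_"
            return rpc_method_name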
- if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + if "update_channel" not in self._stubs: + self._stubs["update_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateChannel", request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_channel'] + return self._stubs["update_channel"] @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - Awaitable[operations_pb2.Operation]]: + def delete_channel( + self, + ) -> Callable[[eventarc.DeleteChannelRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete channel method over gRPC. Delete a single channel. @@ -599,18 +606,18 @@ def delete_channel(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + if "delete_channel" not in self._stubs: + self._stubs["delete_channel"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannel", request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel'] + return self._stubs["delete_channel"] @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - Awaitable[discovery.Provider]]: + def get_provider( + self, + ) -> Callable[[eventarc.GetProviderRequest], Awaitable[discovery.Provider]]: r"""Return a callable for the get provider method over gRPC. Get a single Provider. @@ -625,18 +632,18 @@ def get_provider(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetProvider', + if "get_provider" not in self._stubs: + self._stubs["get_provider"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetProvider", request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, ) - return self._stubs['get_provider'] + return self._stubs["get_provider"] @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - Awaitable[eventarc.ListProvidersResponse]]: + def list_providers( + self, + ) -> Callable[[eventarc.ListProvidersRequest], Awaitable[eventarc.ListProvidersResponse]]: r"""Return a callable for the list providers method over gRPC. List providers. @@ -651,18 +658,18 @@ def list_providers(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
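        # Aside: note the two deserializer shapes used throughout this
        # file. Methods returning proto-plus messages pass
        # `<Message>.deserialize`, while long-running methods returning raw
        # protobuf Operations pass `operations_pb2.Operation.FromString`.
        # A hedged sketch of the raw protobuf round trip (standalone, not
        # generated code):
        from google.longrunning import operations_pb2  # type: ignore

        def _roundtrip(op: operations_pb2.Operation) -> operations_pb2.Operation:
            # Serialize to bytes and parse back, as the channel does on the wire.
            return operations_pb2.Operation.FromString(op.SerializeToString())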
- if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListProviders', + if "list_providers" not in self._stubs: + self._stubs["list_providers"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListProviders", request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, ) - return self._stubs['list_providers'] + return self._stubs["list_providers"] @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - Awaitable[channel_connection.ChannelConnection]]: + def get_channel_connection( + self, + ) -> Callable[[eventarc.GetChannelConnectionRequest], Awaitable[channel_connection.ChannelConnection]]: r"""Return a callable for the get channel connection method over gRPC. Get a single ChannelConnection. @@ -677,18 +684,18 @@ def get_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + if "get_channel_connection" not in self._stubs: + self._stubs["get_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetChannelConnection", request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, ) - return self._stubs['get_channel_connection'] + return self._stubs["get_channel_connection"] @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - Awaitable[eventarc.ListChannelConnectionsResponse]]: + def list_channel_connections( + self, + ) -> Callable[[eventarc.ListChannelConnectionsRequest], Awaitable[eventarc.ListChannelConnectionsResponse]]: r"""Return a callable for the list channel connections method over gRPC. List channel connections. @@ -703,18 +710,18 @@ def list_channel_connections(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + if "list_channel_connections" not in self._stubs: + self._stubs["list_channel_connections"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/ListChannelConnections", request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, ) - return self._stubs['list_channel_connections'] + return self._stubs["list_channel_connections"] @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - Awaitable[operations_pb2.Operation]]: + def create_channel_connection( + self, + ) -> Callable[[eventarc.CreateChannelConnectionRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create channel connection method over gRPC. Create a new ChannelConnection in a particular @@ -730,18 +737,18 @@ def create_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
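        # Aside: awaiting one of the long-running stubs above yields a raw
        # operations_pb2.Operation; the async client layer, not this
        # transport, wraps it for polling. Hedged, self-contained sketch
        # with a stand-in stub (hypothetical names):
        import asyncio
        from google.longrunning import operations_pb2  # type: ignore

        async def _fake_create(request) -> operations_pb2.Operation:
            # Stands in for self._logged_channel.unary_unary(...)(request).
            return operations_pb2.Operation(name="operations/123", done=False)

        print(asyncio.run(_fake_create(None)).name)  # -> operations/123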
- if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + if "create_channel_connection" not in self._stubs: + self._stubs["create_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection", request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_channel_connection'] + return self._stubs["create_channel_connection"] @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - Awaitable[operations_pb2.Operation]]: + def delete_channel_connection( + self, + ) -> Callable[[eventarc.DeleteChannelConnectionRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete channel connection method over gRPC. Delete a single ChannelConnection. @@ -756,18 +763,18 @@ def delete_channel_connection(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + if "delete_channel_connection" not in self._stubs: + self._stubs["delete_channel_connection"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection", request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_channel_connection'] + return self._stubs["delete_channel_connection"] @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - Awaitable[google_channel_config.GoogleChannelConfig]]: + def get_google_channel_config( + self, + ) -> Callable[[eventarc.GetGoogleChannelConfigRequest], Awaitable[google_channel_config.GoogleChannelConfig]]: r"""Return a callable for the get google channel config method over gRPC. Get a GoogleChannelConfig @@ -782,18 +789,18 @@ def get_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + if "get_google_channel_config" not in self._stubs: + self._stubs["get_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig", request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['get_google_channel_config'] + return self._stubs["get_google_channel_config"] @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - Awaitable[gce_google_channel_config.GoogleChannelConfig]]: + def update_google_channel_config( + self, + ) -> Callable[[eventarc.UpdateGoogleChannelConfigRequest], Awaitable[gce_google_channel_config.GoogleChannelConfig]]: r"""Return a callable for the update google channel config method over gRPC. 
Update a single GoogleChannelConfig @@ -808,16 +815,16 @@ def update_google_channel_config(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( - '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + if "update_google_channel_config" not in self._stubs: + self._stubs["update_google_channel_config"] = self._logged_channel.unary_unary( + "/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig", request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, ) - return self._stubs['update_google_channel_config'] + return self._stubs["update_google_channel_config"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.get_trigger: self._wrap_method( self.get_trigger, @@ -972,8 +979,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -990,8 +996,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1008,8 +1013,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1026,8 +1030,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1044,8 +1047,7 @@ def list_operations( def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
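        # Aside: _prep_wrapped_messages above builds self._wrapped_methods
        # once, pairing each stub with a wrapper that applies default
        # retry/timeout policy, so per-call dispatch is a dict lookup.
        # Hedged standalone sketch of such wrapping (hypothetical defaults,
        # not the real gapic_v1 wrapper):
        from typing import Any, Callable, Dict

        def _wrap_with_timeout(func: Callable[..., Any], timeout: float) -> Callable[..., Any]:
            def wrapped(*args, **kwargs):
                kwargs.setdefault("timeout", timeout)
                return func(*args, **kwargs)
            return wrapped

        def _prep(methods: Dict[str, Callable[..., Any]]) -> Dict[str, Callable[..., Any]]:
            return {name: _wrap_with_timeout(fn, timeout=60.0) for name, fn in methods.items()}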
# gRPC handles serialization and deserialization, so we just need @@ -1062,8 +1064,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1130,9 +1131,7 @@ def get_iam_policy( @property def test_iam_permissions( self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: + ) -> Callable[[iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control policy for a function. If the function does not exist, this will @@ -1156,6 +1155,4 @@ def test_iam_permissions( return self._stubs["test_iam_permissions"] -__all__ = ( - 'EventarcGrpcAsyncIOTransport', -) +__all__ = ("EventarcGrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 1d4a69adf0..6fad307317 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -29,7 +29,7 @@ from google.api_core import operations_v1 from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses @@ -57,6 +57,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -237,7 +238,12 @@ def post_update_trigger(self, response): """ - def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_channel( + self, + request: eventarc.CreateChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel Override in a subclass to manipulate the request or metadata @@ -245,7 +251,10 @@ def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: S """ return request, metadata - def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_channel( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel DEPRECATED. 
Please use the `post_create_channel_with_metadata` @@ -258,7 +267,11 @@ def post_create_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_create_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_channel Override in a subclass to read or manipulate the response or metadata after it @@ -273,7 +286,11 @@ def post_create_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_channel_connection( + self, + request: eventarc.CreateChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel_connection Override in a subclass to manipulate the request or metadata @@ -281,7 +298,10 @@ def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectio """ return request, metadata - def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_channel_connection( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel_connection DEPRECATED. Please use the `post_create_channel_connection_with_metadata` @@ -294,7 +314,11 @@ def post_create_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def post_create_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -309,7 +333,11 @@ def post_create_channel_connection_with_metadata(self, response: operations_pb2. 
""" return response, metadata - def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_create_trigger( + self, + request: eventarc.CreateTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger Override in a subclass to manipulate the request or metadata @@ -317,7 +345,10 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S """ return request, metadata - def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_trigger( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger DEPRECATED. Please use the `post_create_trigger_with_metadata` @@ -330,7 +361,11 @@ def post_create_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -345,7 +380,11 @@ def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_channel( + self, + request: eventarc.DeleteChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel Override in a subclass to manipulate the request or metadata @@ -353,7 +392,10 @@ def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: S """ return request, metadata - def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_channel( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel DEPRECATED. 
Please use the `post_delete_channel_with_metadata` @@ -366,7 +408,11 @@ def post_delete_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_channel Override in a subclass to read or manipulate the response or metadata after it @@ -381,7 +427,11 @@ def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_channel_connection( + self, + request: eventarc.DeleteChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel_connection Override in a subclass to manipulate the request or metadata @@ -389,7 +439,10 @@ def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectio """ return request, metadata - def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_channel_connection( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel_connection DEPRECATED. Please use the `post_delete_channel_connection_with_metadata` @@ -402,7 +455,11 @@ def post_delete_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def post_delete_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_channel_connection_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -417,7 +474,11 @@ def post_delete_channel_connection_with_metadata(self, response: operations_pb2. 
""" return response, metadata - def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_trigger( + self, + request: eventarc.DeleteTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_trigger Override in a subclass to manipulate the request or metadata @@ -425,7 +486,10 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S """ return request, metadata - def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_trigger( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger DEPRECATED. Please use the `post_delete_trigger_with_metadata` @@ -438,7 +502,11 @@ def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -453,7 +521,11 @@ def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_channel( + self, + request: eventarc.GetChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel Override in a subclass to manipulate the request or metadata @@ -461,7 +533,10 @@ def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequenc """ return request, metadata - def post_get_channel(self, response: channel.Channel) -> channel.Channel: + def post_get_channel( + self, + response: channel.Channel, + ) -> channel.Channel: """Post-rpc interceptor for get_channel DEPRECATED. 
Please use the `post_get_channel_with_metadata` @@ -474,7 +549,11 @@ def post_get_channel(self, response: channel.Channel) -> channel.Channel: """ return response - def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_channel_with_metadata( + self, + response: channel.Channel, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_channel Override in a subclass to read or manipulate the response or metadata after it @@ -489,7 +568,11 @@ def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Se """ return response, metadata - def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_channel_connection( + self, + request: eventarc.GetChannelConnectionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel_connection Override in a subclass to manipulate the request or metadata @@ -497,7 +580,10 @@ def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionReque """ return request, metadata - def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: + def post_get_channel_connection( + self, + response: channel_connection.ChannelConnection, + ) -> channel_connection.ChannelConnection: """Post-rpc interceptor for get_channel_connection DEPRECATED. 
Please use the `post_get_channel_connection_with_metadata` @@ -510,7 +596,11 @@ def post_get_channel_connection(self, response: channel_connection.ChannelConnec """ return response - def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_channel_connection_with_metadata( + self, + response: channel_connection.ChannelConnection, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_channel_connection Override in a subclass to read or manipulate the response or metadata after it @@ -525,7 +615,11 @@ def post_get_channel_connection_with_metadata(self, response: channel_connection """ return response, metadata - def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_google_channel_config( + self, + request: eventarc.GetGoogleChannelConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_google_channel_config Override in a subclass to manipulate the request or metadata @@ -533,7 +627,10 @@ def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfig """ return request, metadata - def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: + def post_get_google_channel_config( + self, + response: google_channel_config.GoogleChannelConfig, + ) -> google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for get_google_channel_config DEPRECATED. 
Please use the `post_get_google_channel_config_with_metadata` @@ -546,7 +643,11 @@ def post_get_google_channel_config(self, response: google_channel_config.GoogleC """ return response - def post_get_google_channel_config_with_metadata(self, response: google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_google_channel_config_with_metadata( + self, + response: google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_google_channel_config Override in a subclass to read or manipulate the response or metadata after it @@ -561,7 +662,11 @@ def post_get_google_channel_config_with_metadata(self, response: google_channel_ """ return response, metadata - def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_provider( + self, + request: eventarc.GetProviderRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider Override in a subclass to manipulate the request or metadata @@ -569,7 +674,10 @@ def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Seque """ return request, metadata - def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: + def post_get_provider( + self, + response: discovery.Provider, + ) -> discovery.Provider: """Post-rpc interceptor for get_provider DEPRECATED. Please use the `post_get_provider_with_metadata` @@ -582,7 +690,11 @@ def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """ return response - def post_get_provider_with_metadata(self, response: discovery.Provider, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_provider_with_metadata( + self, + response: discovery.Provider, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_provider Override in a subclass to read or manipulate the response or metadata after it @@ -597,7 +709,11 @@ def post_get_provider_with_metadata(self, response: discovery.Provider, metadata """ return response, metadata - def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_trigger( + self, + request: eventarc.GetTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_trigger Override in a subclass to manipulate the request or metadata @@ -605,7 +721,10 @@ def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequenc """ return request, metadata - def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: + def post_get_trigger( + self, + response: trigger.Trigger, + ) -> trigger.Trigger: """Post-rpc interceptor for get_trigger DEPRECATED. 
Please use the `post_get_trigger_with_metadata` @@ -618,7 +737,11 @@ def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """ return response - def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_trigger_with_metadata( + self, + response: trigger.Trigger, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -633,7 +756,11 @@ def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Se """ return response, metadata - def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_channel_connections( + self, + request: eventarc.ListChannelConnectionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channel_connections Override in a subclass to manipulate the request or metadata @@ -641,7 +768,10 @@ def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsR """ return request, metadata - def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: + def post_list_channel_connections( + self, + response: eventarc.ListChannelConnectionsResponse, + ) -> eventarc.ListChannelConnectionsResponse: """Post-rpc interceptor for list_channel_connections DEPRECATED. 
Please use the `post_list_channel_connections_with_metadata` @@ -654,7 +784,11 @@ def post_list_channel_connections(self, response: eventarc.ListChannelConnection """ return response - def post_list_channel_connections_with_metadata(self, response: eventarc.ListChannelConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_channel_connections_with_metadata( + self, + response: eventarc.ListChannelConnectionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_channel_connections Override in a subclass to read or manipulate the response or metadata after it @@ -669,7 +803,11 @@ def post_list_channel_connections_with_metadata(self, response: eventarc.ListCha """ return response, metadata - def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_channels( + self, + request: eventarc.ListChannelsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channels Override in a subclass to manipulate the request or metadata @@ -677,7 +815,10 @@ def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Seq """ return request, metadata - def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: + def post_list_channels( + self, + response: eventarc.ListChannelsResponse, + ) -> eventarc.ListChannelsResponse: """Post-rpc interceptor for list_channels DEPRECATED. 
Please use the `post_list_channels_with_metadata` @@ -690,7 +831,11 @@ def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventar """ return response - def post_list_channels_with_metadata(self, response: eventarc.ListChannelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_channels_with_metadata( + self, + response: eventarc.ListChannelsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_channels Override in a subclass to read or manipulate the response or metadata after it @@ -705,7 +850,11 @@ def post_list_channels_with_metadata(self, response: eventarc.ListChannelsRespon """ return response, metadata - def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_providers( + self, + request: eventarc.ListProvidersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_providers Override in a subclass to manipulate the request or metadata @@ -713,7 +862,10 @@ def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: S """ return request, metadata - def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: + def post_list_providers( + self, + response: eventarc.ListProvidersResponse, + ) -> eventarc.ListProvidersResponse: """Post-rpc interceptor for list_providers DEPRECATED. 
Please use the `post_list_providers_with_metadata` @@ -726,7 +878,11 @@ def post_list_providers(self, response: eventarc.ListProvidersResponse) -> event """ return response - def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_providers_with_metadata( + self, + response: eventarc.ListProvidersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_providers Override in a subclass to read or manipulate the response or metadata after it @@ -741,7 +897,11 @@ def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResp """ return response, metadata - def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_triggers( + self, + request: eventarc.ListTriggersRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_triggers Override in a subclass to manipulate the request or metadata @@ -749,7 +909,10 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq """ return request, metadata - def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: + def post_list_triggers( + self, + response: eventarc.ListTriggersResponse, + ) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers DEPRECATED. 
Please use the `post_list_triggers_with_metadata` @@ -762,7 +925,11 @@ def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventar """ return response - def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_triggers_with_metadata( + self, + response: eventarc.ListTriggersResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_triggers Override in a subclass to read or manipulate the response or metadata after it @@ -777,7 +944,11 @@ def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersRespon """ return response, metadata - def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_channel( + self, + request: eventarc.UpdateChannelRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_channel Override in a subclass to manipulate the request or metadata @@ -785,7 +956,10 @@ def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: S """ return request, metadata - def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_channel( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_channel DEPRECATED. 
Please use the `post_update_channel_with_metadata` @@ -798,7 +972,11 @@ def post_update_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_channel_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_channel Override in a subclass to read or manipulate the response or metadata after it @@ -813,7 +991,11 @@ def post_update_channel_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_google_channel_config( + self, + request: eventarc.UpdateGoogleChannelConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_google_channel_config Override in a subclass to manipulate the request or metadata @@ -821,7 +1003,10 @@ def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannel """ return request, metadata - def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: + def post_update_google_channel_config( + self, + response: gce_google_channel_config.GoogleChannelConfig, + ) -> gce_google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for update_google_channel_config DEPRECATED. Please use the `post_update_google_channel_config_with_metadata` @@ -834,7 +1019,11 @@ def post_update_google_channel_config(self, response: gce_google_channel_config. 
""" return response - def post_update_google_channel_config_with_metadata(self, response: gce_google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gce_google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_google_channel_config_with_metadata( + self, + response: gce_google_channel_config.GoogleChannelConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gce_google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_google_channel_config Override in a subclass to read or manipulate the response or metadata after it @@ -849,7 +1038,11 @@ def post_update_google_channel_config_with_metadata(self, response: gce_google_c """ return response, metadata - def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_trigger( + self, + request: eventarc.UpdateTriggerRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger Override in a subclass to manipulate the request or metadata @@ -857,7 +1050,10 @@ def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: S """ return request, metadata - def post_update_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_trigger( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_trigger DEPRECATED. Please use the `post_update_trigger_with_metadata` @@ -870,7 +1066,11 @@ def post_update_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_trigger_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_trigger Override in a subclass to read or manipulate the response or metadata after it @@ -886,7 +1086,9 @@ def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location @@ -896,7 +1098,8 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.Location + self, + response: locations_pb2.Location, ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -907,7 +1110,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.ListLocationsRequest, 
Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations @@ -917,7 +1122,8 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsResponse + self, + response: locations_pb2.ListLocationsResponse, ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -928,7 +1134,9 @@ def post_list_locations( return response def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_iam_policy @@ -938,7 +1146,8 @@ def pre_get_iam_policy( return request, metadata def post_get_iam_policy( - self, response: policy_pb2.Policy + self, + response: policy_pb2.Policy, ) -> policy_pb2.Policy: """Post-rpc interceptor for get_iam_policy @@ -949,7 +1158,9 @@ def post_get_iam_policy( return response def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for set_iam_policy @@ -959,7 +1170,8 @@ def pre_set_iam_policy( return request, metadata def post_set_iam_policy( - self, response: policy_pb2.Policy + self, + response: policy_pb2.Policy, ) -> policy_pb2.Policy: """Post-rpc interceptor for set_iam_policy @@ -970,7 +1182,9 @@ def post_set_iam_policy( return response def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for test_iam_permissions @@ -980,7 +1194,8 @@ def pre_test_iam_permissions( return request, metadata def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsResponse + self, + response: iam_policy_pb2.TestIamPermissionsResponse, ) -> iam_policy_pb2.TestIamPermissionsResponse: """Post-rpc interceptor for test_iam_permissions @@ -991,7 +1206,9 @@ def post_test_iam_permissions( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation @@ -1001,7 +1218,8 @@ def pre_cancel_operation( return request, metadata def post_cancel_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for cancel_operation @@ -1012,7 +1230,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc 
@@ -1012,7 +1230,9 @@ def post_cancel_operation(
         return response
 
     def pre_delete_operation(
-        self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]
+        self,
+        request: operations_pb2.DeleteOperationRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
     ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
         """Pre-rpc interceptor for delete_operation
 
@@ -1022,7 +1242,8 @@ def pre_delete_operation(
         return request, metadata
 
     def post_delete_operation(
-        self, response: None
+        self,
+        response: None,
     ) -> None:
         """Post-rpc interceptor for delete_operation
 
@@ -1033,7 +1254,9 @@ def post_delete_operation(
         return response
 
     def pre_get_operation(
-        self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]
+        self,
+        request: operations_pb2.GetOperationRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
     ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
         """Pre-rpc interceptor for get_operation
 
@@ -1043,7 +1266,8 @@ def pre_get_operation(
         return request, metadata
 
     def post_get_operation(
-        self, response: operations_pb2.Operation
+        self,
+        response: operations_pb2.Operation,
     ) -> operations_pb2.Operation:
         """Post-rpc interceptor for get_operation
 
@@ -1054,7 +1278,9 @@ def post_get_operation(
         return response
 
     def pre_list_operations(
-        self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]
+        self,
+        request: operations_pb2.ListOperationsRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
     ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
         """Pre-rpc interceptor for list_operations
 
@@ -1064,7 +1290,8 @@ def pre_list_operations(
         return request, metadata
 
     def post_list_operations(
-        self, response: operations_pb2.ListOperationsResponse
+        self,
+        response: operations_pb2.ListOperationsResponse,
     ) -> operations_pb2.ListOperationsResponse:
         """Post-rpc interceptor for list_operations
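These pre_*/post_* hooks are the extension points downstream code overrides. A minimal sketch of the pattern, assuming only the names visible in this diff (EventarcRestInterceptor, EventarcRestTransport) and the usual GAPIC convention that a client accepts a ready-made transport; the LoggingInterceptor class and the prints are illustrative, not part of the patch:

# Hypothetical usage sketch. It assumes the generated names shown in
# this diff and the standard GAPIC client/transport wiring.
from google.cloud import eventarc_v1
from google.cloud.eventarc_v1.services.eventarc.transports.rest import (
    EventarcRestInterceptor,
    EventarcRestTransport,
)

class LoggingInterceptor(EventarcRestInterceptor):
    def pre_get_trigger(self, request, metadata):
        # Inspect or rewrite the request/metadata before the HTTP call.
        print(f"about to call GetTrigger with metadata={metadata}")
        return request, metadata

    def post_get_trigger(self, response):
        # Read or replace the response after the HTTP call returns.
        print(f"got trigger: {response.name}")
        return response

client = eventarc_v1.EventarcClient(
    transport=EventarcRestTransport(interceptor=LoggingInterceptor())
)

Note that the *_with_metadata variants are the preferred hooks; the docstrings above now mark the plain post_* forms as DEPRECATED.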
@@ -1096,29 +1323,30 @@ class EventarcRestTransport(_BaseEventarcRestTransport):
     It sends JSON representations of protocol buffers over HTTP/1.1
     """
-    def __init__(self, *,
-            host: str = 'eventarc.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-            ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[EventarcRestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "eventarc.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        interceptor: Optional[EventarcRestInterceptor] = None,
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.
 
-       NOTE: This REST transport functionality is currently in a beta
-       state (preview). We welcome your feedback via a GitHub issue in
-       this library's repository. Thank you!
+        NOTE: This REST transport functionality is currently in a beta
+        state (preview). We welcome your feedback via a GitHub issue in
+        this library's repository. Thank you!
 
         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'eventarc.googleapis.com').
+                 The hostname to connect to (default: "eventarc.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]): The
                 authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -1157,10 +1385,9 @@ def __init__(self, *,
             client_info=client_info,
             always_use_jwt_access=always_use_jwt_access,
             url_scheme=url_scheme,
-            api_audience=api_audience
+            api_audience=api_audience,
         )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
+        self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST)
         self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
         if client_cert_source_for_mtls:
             self._session.configure_mtls_channel(client_cert_source_for_mtls)
@@ -1177,40 +1404,41 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient:
         # Only create a new client if we do not already have one.
         if self._operations_client is None:
             http_options: Dict[str, List[Dict[str, str]]] = {
-                'google.longrunning.Operations.CancelOperation': [
+                "google.longrunning.Operations.CancelOperation": [
                     {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
-                        'body': '*',
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                        "body": "*",
                     },
                 ],
-                'google.longrunning.Operations.DeleteOperation': [
+                "google.longrunning.Operations.DeleteOperation": [
                     {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                     },
                 ],
-                'google.longrunning.Operations.GetOperation': [
+                "google.longrunning.Operations.GetOperation": [
                     {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                     },
                 ],
-                'google.longrunning.Operations.ListOperations': [
+                "google.longrunning.Operations.ListOperations": [
                     {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*}/operations',
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
                     },
                 ],
             }
 
             rest_transport = operations_v1.OperationsRestTransport(
-                    host=self._host,
-                    # use the credentials which are saved
-                    credentials=self._credentials,
-                    scopes=self._scopes,
-                    http_options=http_options,
-                    path_prefix="v1")
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                scopes=self._scopes,
+                http_options=http_options,
+                path_prefix="v1",
+            )
 
             self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport)
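The http_options table above is how the REST transport serves google.longrunning.Operations calls without a gRPC channel: each LRO method is mapped onto a v1 REST path and handed to an operations_v1.OperationsRestTransport. A hedged sketch of polling through that client follows; the transport and property names come from this diff, while the project and operation IDs are placeholders and real use assumes Application Default Credentials:

# Sketch: poll an Eventarc long-running operation via the REST
# operations client. Resource name is a made-up placeholder.
from google.cloud.eventarc_v1.services.eventarc.transports.rest import (
    EventarcRestTransport,
)

transport = EventarcRestTransport()  # default host, ADC credentials
operation = transport.operations_client.get_operation(
    name="projects/my-project/locations/us-central1/operations/operation-123"
)
print(operation.done)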
@@ -1229,27 +1457,29 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.CreateChannelRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.CreateChannelRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the create channel method over HTTP.
 
             Args:
@@ -1283,21 +1513,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateChannel",
                         "httpRequest": http_request,
@@ -1306,7 +1536,15 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._CreateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+            response = EventarcRestTransport._CreateChannel._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1326,13 +1564,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateChannel",
                         "metadata": http_response["headers"],
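Every reformatted stanza in this file repeats the same flow: transcode the request, attach flattened query params, send via the AuthorizedSession, raise on HTTP error, deserialize, then run the post interceptor. A condensed, hypothetical skeleton of that round trip (helper wiring is illustrative; the real code uses the generated _BaseEventarcRestTransport._Base<Rpc> classes and the interceptor methods shown earlier in this patch):

# Condensed, hypothetical skeleton of one REST RPC round trip.
from google.api_core import exceptions as core_exceptions
from google.api_core import path_template, rest_helpers

def call_rest_rpc(session, host, http_options, request, pre_hook, post_hook, metadata=()):
    request, metadata = pre_hook(request, metadata)      # pre-rpc interceptor
    transcoded = path_template.transcode(http_options, message=request)
    response = getattr(session, transcoded["method"])(   # e.g. session.post(...)
        "{host}{uri}".format(host=host, uri=transcoded["uri"]),
        headers=dict(metadata),
        params=rest_helpers.flatten_query_params(
            transcoded.get("query_params", {}), strict=True
        ),
        data=transcoded.get("body"),
    )
    if response.status_code >= 400:                      # map onto GoogleAPICallError
        raise core_exceptions.from_http_response(response)
    return post_hook(response)                           # post-rpc interceptor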
@@ -1353,27 +1591,29 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.CreateChannelConnectionRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.CreateChannelConnectionRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the create channel connection method over HTTP.
 
             Args:
@@ -1407,21 +1647,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateChannelConnection",
                         "httpRequest": http_request,
@@ -1430,7 +1670,15 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+            response = EventarcRestTransport._CreateChannelConnection._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1450,13 +1698,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateChannelConnection",
                         "metadata": http_response["headers"],
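The CLIENT_LOGGING_SUPPORTED guards above mean the structured httpRequest/httpResponse records only appear once DEBUG logging is enabled on the eventarc logger; plain standard-library logging is enough to surface them. (Newer google-api-core releases also honor the GOOGLE_SDK_PYTHON_LOGGING_SCOPE environment variable, though that is outside this patch.)

# Emit the DEBUG records produced by the logging blocks above.
import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.eventarc_v1").setLevel(logging.DEBUG)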
@@ -1477,27 +1725,29 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.CreateTriggerRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.CreateTriggerRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the create trigger method over HTTP.
 
             Args:
@@ -1531,21 +1781,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateTrigger",
                         "httpRequest": http_request,
@@ -1554,7 +1804,15 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+            response = EventarcRestTransport._CreateTrigger._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1574,13 +1832,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "CreateTrigger",
                         "metadata": http_response["headers"],
@@ -1601,26 +1859,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.DeleteChannelRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.DeleteChannelRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the delete channel method over HTTP.
 
             Args:
@@ -1652,21 +1912,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteChannel",
                         "httpRequest": http_request,
@@ -1675,7 +1935,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._DeleteChannel._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1695,13 +1962,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteChannel",
                         "metadata": http_response["headers"],
@@ -1722,26 +1989,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.DeleteChannelConnectionRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.DeleteChannelConnectionRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the delete channel connection method over HTTP.
 
             Args:
@@ -1773,21 +2042,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteChannelConnection",
                         "httpRequest": http_request,
@@ -1796,7 +2065,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._DeleteChannelConnection._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1816,13 +2092,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteChannelConnection",
                         "metadata": http_response["headers"],
@@ -1843,26 +2119,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.DeleteTriggerRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.DeleteTriggerRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
             r"""Call the delete trigger method over HTTP.
 
             Args:
@@ -1894,21 +2172,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteTrigger",
                         "httpRequest": http_request,
@@ -1917,7 +2195,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._DeleteTrigger._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -1937,13 +2222,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "DeleteTrigger",
                         "metadata": http_response["headers"],
@@ -1964,26 +2249,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.GetChannelRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> channel.Channel:
+        def __call__(
+            self,
+            request: eventarc.GetChannelRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> channel.Channel:
             r"""Call the get channel method over HTTP.
 
             Args:
@@ -2020,21 +2307,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetChannel",
                         "httpRequest": http_request,
@@ -2043,7 +2330,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._GetChannel._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2065,13 +2359,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetChannel",
                         "metadata": http_response["headers"],
@@ -2092,26 +2386,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.GetChannelConnectionRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> channel_connection.ChannelConnection:
+        def __call__(
+            self,
+            request: eventarc.GetChannelConnectionRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> channel_connection.ChannelConnection:
             r"""Call the get channel connection method over HTTP.
 
             Args:
@@ -2147,21 +2443,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetChannelConnection",
                         "httpRequest": http_request,
@@ -2170,7 +2466,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._GetChannelConnection._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2192,13 +2495,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetChannelConnection",
                         "metadata": http_response["headers"],
@@ -2219,26 +2522,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.GetGoogleChannelConfigRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> google_channel_config.GoogleChannelConfig:
+        def __call__(
+            self,
+            request: eventarc.GetGoogleChannelConfigRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> google_channel_config.GoogleChannelConfig:
            r"""Call the get google channel config method over HTTP.
 
             Args:
@@ -2275,21 +2580,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetGoogleChannelConfig",
                         "httpRequest": http_request,
@@ -2298,7 +2603,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._GetGoogleChannelConfig._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2320,13 +2632,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetGoogleChannelConfig",
                         "metadata": http_response["headers"],
@@ -2347,26 +2659,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.GetProviderRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> discovery.Provider:
+        def __call__(
+            self,
+            request: eventarc.GetProviderRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> discovery.Provider:
             r"""Call the get provider method over HTTP.
 
             Args:
@@ -2397,21 +2711,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetProvider",
                         "httpRequest": http_request,
@@ -2420,7 +2734,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._GetProvider._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2442,13 +2763,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetProvider",
                         "metadata": http_response["headers"],
@@ -2469,26 +2790,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.GetTriggerRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> trigger.Trigger:
+        def __call__(
+            self,
+            request: eventarc.GetTriggerRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> trigger.Trigger:
             r"""Call the get trigger method over HTTP.
 
             Args:
@@ -2519,21 +2842,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetTrigger",
                         "httpRequest": http_request,
@@ -2542,7 +2865,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._GetTrigger._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2564,13 +2894,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "GetTrigger",
                         "metadata": http_response["headers"],
@@ -2591,26 +2921,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.ListChannelConnectionsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> eventarc.ListChannelConnectionsResponse:
+        def __call__(
+            self,
+            request: eventarc.ListChannelConnectionsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> eventarc.ListChannelConnectionsResponse:
            r"""Call the list channel connections method over HTTP.
 
             Args:
@@ -2641,21 +2973,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListChannelConnections",
                         "httpRequest": http_request,
@@ -2664,7 +2996,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._ListChannelConnections._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2686,13 +3025,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.list_channel_connections",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListChannelConnections",
                         "metadata": http_response["headers"],
@@ -2713,26 +3052,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.ListChannelsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> eventarc.ListChannelsResponse:
+        def __call__(
+            self,
+            request: eventarc.ListChannelsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> eventarc.ListChannelsResponse:
             r"""Call the list channels method over HTTP.
 
             Args:
@@ -2761,21 +3102,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListChannels",
                         "httpRequest": http_request,
@@ -2784,7 +3125,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._ListChannels._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2806,13 +3154,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListChannels",
                         "metadata": http_response["headers"],
@@ -2833,26 +3181,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.ListProvidersRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> eventarc.ListProvidersResponse:
+        def __call__(
+            self,
+            request: eventarc.ListProvidersRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> eventarc.ListProvidersResponse:
            r"""Call the list providers method over HTTP.
 
             Args:
@@ -2881,21 +3231,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListProviders",
                         "httpRequest": http_request,
@@ -2904,7 +3254,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._ListProviders._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -2926,13 +3283,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.list_providers",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListProviders",
                         "metadata": http_response["headers"],
@@ -2953,26 +3310,28 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.ListTriggersRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> eventarc.ListTriggersResponse:
+        def __call__(
+            self,
+            request: eventarc.ListTriggersRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> eventarc.ListTriggersResponse:
            r"""Call the list triggers method over HTTP.
 
             Args:
@@ -3001,21 +3360,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = type(request).to_json(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListTriggers",
                         "httpRequest": http_request,
@@ -3024,7 +3383,14 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = EventarcRestTransport._ListTriggers._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -3046,13 +3412,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "ListTriggers",
                         "metadata": http_response["headers"],
@@ -3073,27 +3439,29 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.UpdateChannelRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
+        def __call__(
+            self,
+            request: eventarc.UpdateChannelRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
            r"""Call the update channel method over HTTP.
 
             Args:
@@ -3127,21 +3495,21 @@ def __call__(self,
             query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request)
 
             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "UpdateChannel",
                         "httpRequest": http_request,
@@ -3150,7 +3518,15 @@ def __call__(self,
                 )
 
             # Send the request
-            response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+            response = EventarcRestTransport._UpdateChannel._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )
 
             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
@@ -3170,13 +3546,13 @@ def __call__(self,
                 except:
                     response_payload = None
                 http_response = {
-                  "payload": response_payload,
-                  "headers": dict(response.headers),
-                  "status": response.status_code,
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.eventarc.v1.Eventarc",
                         "rpcName": "UpdateChannel",
                         "metadata": http_response["headers"],
@@ -3197,29 +3573,30 @@ def _get_response(
                     session,
                     timeout,
                     transcoded_request,
-                    body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+                    body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response
 
-        def __call__(self,
-                request: eventarc.UpdateGoogleChannelConfigRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> gce_google_channel_config.GoogleChannelConfig:
-            r"""Call the update google channel
-        config method over HTTP.
+        def __call__(
+            self,
+            request: eventarc.UpdateGoogleChannelConfigRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> gce_google_channel_config.GoogleChannelConfig:
+            r"""Call the update google channel config method over HTTP.
Args: request (~.eventarc.UpdateGoogleChannelConfigRequest): @@ -3257,21 +3634,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateGoogleChannelConfig", "httpRequest": http_request, @@ -3280,7 +3657,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3302,13 +3687,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateGoogleChannelConfig", "metadata": http_response["headers"], @@ -3329,27 +3714,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: eventarc.UpdateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: eventarc.UpdateTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. 
Args: @@ -3383,21 +3770,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateTrigger", "httpRequest": http_request, @@ -3406,7 +3793,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._UpdateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._UpdateTrigger._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3426,13 +3821,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcClient.update_trigger", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateTrigger", "metadata": http_response["headers"], @@ -3441,153 +3836,189 @@ def __call__(self, ) return resp + # fmt: off @property - def create_channel_(self) -> Callable[ - [eventarc.CreateChannelRequest], - operations_pb2.Operation]: + def create_channel_( + self + ) -> Callable[[eventarc.CreateChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def create_channel_connection(self) -> Callable[ - [eventarc.CreateChannelConnectionRequest], - operations_pb2.Operation]: + def create_channel_connection( + self + ) -> Callable[[eventarc.CreateChannelConnectionRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def create_trigger(self) -> Callable[ - [eventarc.CreateTriggerRequest], - operations_pb2.Operation]: + def create_trigger( + self + ) -> Callable[[eventarc.CreateTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_channel(self) -> Callable[ - [eventarc.DeleteChannelRequest], - operations_pb2.Operation]: + def delete_channel( + self + ) -> Callable[[eventarc.DeleteChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_channel_connection(self) -> Callable[ - [eventarc.DeleteChannelConnectionRequest], - operations_pb2.Operation]: + def delete_channel_connection( + self + ) -> Callable[[eventarc.DeleteChannelConnectionRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_trigger(self) -> Callable[ - [eventarc.DeleteTriggerRequest], - operations_pb2.Operation]: + def delete_trigger( + self + ) -> Callable[[eventarc.DeleteTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_channel(self) -> Callable[ - [eventarc.GetChannelRequest], - channel.Channel]: + def get_channel( + self + ) -> Callable[[eventarc.GetChannelRequest], channel.Channel]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_channel_connection(self) -> Callable[ - [eventarc.GetChannelConnectionRequest], - channel_connection.ChannelConnection]: + def get_channel_connection( + self + ) -> Callable[[eventarc.GetChannelConnectionRequest], channel_connection.ChannelConnection]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_google_channel_config(self) -> Callable[ - [eventarc.GetGoogleChannelConfigRequest], - google_channel_config.GoogleChannelConfig]: + def get_google_channel_config( + self + ) -> Callable[[eventarc.GetGoogleChannelConfigRequest], google_channel_config.GoogleChannelConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_provider(self) -> Callable[ - [eventarc.GetProviderRequest], - discovery.Provider]: + def get_provider( + self + ) -> Callable[[eventarc.GetProviderRequest], discovery.Provider]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_trigger(self) -> Callable[ - [eventarc.GetTriggerRequest], - trigger.Trigger]: + def get_trigger( + self + ) -> Callable[[eventarc.GetTriggerRequest], trigger.Trigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_channel_connections(self) -> Callable[ - [eventarc.ListChannelConnectionsRequest], - eventarc.ListChannelConnectionsResponse]: + def list_channel_connections( + self + ) -> Callable[[eventarc.ListChannelConnectionsRequest], eventarc.ListChannelConnectionsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_channels(self) -> Callable[ - [eventarc.ListChannelsRequest], - eventarc.ListChannelsResponse]: + def list_channels( + self + ) -> Callable[[eventarc.ListChannelsRequest], eventarc.ListChannelsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_providers(self) -> Callable[ - [eventarc.ListProvidersRequest], - eventarc.ListProvidersResponse]: + def list_providers( + self + ) -> Callable[[eventarc.ListProvidersRequest], eventarc.ListProvidersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_triggers(self) -> Callable[ - [eventarc.ListTriggersRequest], - eventarc.ListTriggersResponse]: + def list_triggers( + self + ) -> Callable[[eventarc.ListTriggersRequest], eventarc.ListTriggersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_channel(self) -> Callable[ - [eventarc.UpdateChannelRequest], - operations_pb2.Operation]: + def update_channel( + self + ) -> Callable[[eventarc.UpdateChannelRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_google_channel_config(self) -> Callable[ - [eventarc.UpdateGoogleChannelConfigRequest], - gce_google_channel_config.GoogleChannelConfig]: + def update_google_channel_config( + self + ) -> Callable[[eventarc.UpdateGoogleChannelConfigRequest], gce_google_channel_config.GoogleChannelConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_trigger(self) -> Callable[ - [eventarc.UpdateTriggerRequest], - operations_pb2.Operation]: + def update_trigger( + self + ) -> Callable[[eventarc.UpdateTriggerRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + # fmt: on @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): def __hash__(self): @@ -3601,27 +4032,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -3648,21 +4080,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", "httpRequest": http_request, @@ -3671,7 +4103,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
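Every `_get_response` helper in this transport shares one dispatch idiom: the transcoded request names the HTTP verb as a lowercase string, and `getattr(session, method)` resolves it to the matching `requests.Session` method, so a single helper serves GET, POST, PATCH and DELETE RPCs alike. A minimal sketch of that idiom, assuming a plain `requests.Session`; `send_transcoded_request` and the sample URI are illustrative, not part of the generated surface:

    import requests

    def send_transcoded_request(session, host, transcoded_request, timeout=None, body=None):
        # The transcoded request carries the verb as a lowercase string
        # ("get", "post", ...); getattr resolves it to session.get,
        # session.post, etc.
        uri = transcoded_request["uri"]
        method = transcoded_request["method"]
        return getattr(session, method)(
            "{host}{uri}".format(host=host, uri=uri),
            timeout=timeout,
            headers={"Content-Type": "application/json"},
            data=body,
        )

    if __name__ == "__main__":
        response = send_transcoded_request(
            requests.Session(),
            "https://eventarc.googleapis.com",
            {"uri": "/v1/projects/p/locations/l/triggers", "method": "get"},
            timeout=30.0,
        )
        print(response.status_code)
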
@@ -3689,12 +4128,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", "httpResponse": http_response, @@ -3705,7 +4144,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): def __hash__(self): @@ -3719,27 +4158,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -3766,21 +4206,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", "httpRequest": http_request, @@ -3789,7 +4229,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
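The logging blocks being reformatted above all follow the same shape: an `isEnabledFor(logging.DEBUG)` guard so the structured payload is never built when DEBUG is off, then a `_LOGGER.debug(..., extra={...})` call whose `extra` dict rides along on the emitted `LogRecord`. A self-contained sketch of that shape using only the standard library; the helper name and sample values are illustrative:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    _LOGGER = logging.getLogger("google.cloud.eventarc_v1")

    def log_http_request(host, transcoded_request, request_payload):
        # Only assemble the structured payload when DEBUG is actually
        # enabled; this work would otherwise be wasted on every call.
        if _LOGGER.isEnabledFor(logging.DEBUG):
            http_request = {
                "payload": request_payload,
                "requestMethod": transcoded_request["method"],
                "requestUrl": "{host}{uri}".format(host=host, uri=transcoded_request["uri"]),
            }
            _LOGGER.debug(
                "Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations",
                extra={
                    "serviceName": "google.cloud.eventarc.v1.Eventarc",
                    "rpcName": "ListLocations",
                    "httpRequest": http_request,
                },
            )

    log_http_request(
        "https://eventarc.googleapis.com",
        {"uri": "/v1/projects/p/locations", "method": "get"},
        None,
    )

Note that the keys placed in `extra` must not collide with reserved `LogRecord` attribute names; the generated fields (`serviceName`, `rpcName`, `httpRequest`, `httpResponse`) are safe in that respect.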
@@ -3807,12 +4254,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", "httpResponse": http_response, @@ -3823,7 +4270,7 @@ def __call__(self, @property def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): def __hash__(self): @@ -3837,27 +4284,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: iam_policy_pb2.GetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. Args: @@ -3884,21 +4332,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", "httpRequest": http_request, @@ -3907,7 +4355,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
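The `try`/bare-`except` wrapped around `json_format.MessageToJson(request)` in each logging block exists so that a failure to serialize the request for logging can never break the RPC itself: the payload simply degrades to `None`. A sketch of the same guard, written here with a narrower `except Exception` (the generated code keeps the bare `except:`); `safe_payload` is a hypothetical name:

    from google.protobuf import json_format
    from google.protobuf import struct_pb2

    def safe_payload(message):
        # Fall back to None rather than let a serialization error
        # propagate out of a DEBUG-only logging block.
        try:
            return json_format.MessageToJson(message)
        except Exception:
            return None

    msg = struct_pb2.Struct()
    msg.update({"name": "projects/p/locations/l"})
    print(safe_payload(msg))
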
@@ -3925,12 +4380,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", "httpResponse": http_response, @@ -3941,7 +4396,7 @@ def __call__(self, @property def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): def __hash__(self): @@ -3955,28 +4410,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: iam_policy_pb2.SetIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> policy_pb2.Policy: - + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. Args: @@ -4005,21 +4461,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", "httpRequest": http_request, @@ -4028,7 +4484,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
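Each `_get_response` above passes the decoded query params through `rest_helpers.flatten_query_params(query_params, strict=True)` before handing them to the session. As I understand the helper from google-api-core, it flattens nested dicts into dotted query keys, and `strict=True` additionally normalizes primitive leaf values (for example booleans to "true"/"false") into URL-safe strings; a small sketch under that assumption, with an illustrative input dict:

    from google.api_core import rest_helpers

    # Nested message fields become dotted query keys; strict=True
    # normalizes primitive leaves into strings safe to put on a URL.
    params = rest_helpers.flatten_query_params(
        {"validateOnly": True, "trigger": {"name": "projects/p/locations/l/triggers/t"}},
        strict=True,
    )
    print(params)
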
@@ -4046,12 +4510,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.SetIamPolicy", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", "httpResponse": http_response, @@ -4062,7 +4526,7 @@ def __call__(self, @property def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): def __hash__(self): @@ -4076,28 +4540,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: iam_policy_pb2.TestIamPermissionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. 
Args: @@ -4126,21 +4591,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", "httpRequest": http_request, @@ -4149,7 +4614,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4167,12 +4640,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.TestIamPermissions", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", "httpResponse": http_response, @@ -4183,7 +4656,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): def __hash__(self): @@ -4197,28 +4670,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. 
Args: @@ -4244,21 +4718,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -4267,7 +4741,15 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = EventarcRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4278,7 +4760,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): def __hash__(self): @@ -4292,27 +4774,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. 
Args: @@ -4336,21 +4819,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -4359,7 +4842,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4370,7 +4860,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): def __hash__(self): @@ -4384,27 +4874,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. 
Args: @@ -4431,21 +4922,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", "httpRequest": http_request, @@ -4454,7 +4945,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4472,12 +4970,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", "httpResponse": http_response, @@ -4488,7 +4986,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): def __hash__(self): @@ -4502,27 +5000,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. 
Args: @@ -4549,21 +5048,21 @@ def __call__(self, query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", "httpRequest": http_request, @@ -4572,7 +5071,14 @@ def __call__(self, ) # Send the request - response = EventarcRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = EventarcRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -4590,12 +5096,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", "httpResponse": http_response, @@ -4612,6 +5118,4 @@ def close(self): self._session.close() -__all__=( - 'EventarcRestTransport', -) +__all__ = ("EventarcRestTransport",) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 6ff0278064..42817f85a6 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -20,7 +20,7 @@ from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO import re @@ -50,18 +50,20 @@ class _BaseEventarcRestTransport(EventarcTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'eventarc.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "eventarc.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'eventarc.googleapis.com'). + The hostname to connect to (default: "eventarc.googleapis.com"). credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -92,15 +94,19 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelId" : "", "validateOnly" : False, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelId": "", + "validateOnly": False, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -108,11 +114,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/channels', - 'body': 'channel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/channels", + "body": "channel", + }, ] return http_options @@ -127,16 +134,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseCreateChannel._get_unset_required_fields(query_params)) return query_params @@ -145,8 +155,11 @@ class _BaseCreateChannelConnection: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelConnectionId" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelConnectionId": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -154,11 +167,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', - 'body': 'channel_connection', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/channelConnections", + "body": "channel_connection", + }, ] return http_options @@ -173,16 +187,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseCreateChannelConnection._get_unset_required_fields(query_params)) return query_params @@ -191,8 +208,12 @@ class _BaseCreateTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "triggerId": "", + "validateOnly": False, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -200,11 +221,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/triggers", + "body": "trigger", + }, ] return http_options @@ -219,16 +241,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields(query_params)) return query_params @@ -237,8 +262,11 @@ class _BaseDeleteChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -246,10 +274,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/channels/*}", + }, ] return http_options @@ -261,10 +290,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields(query_params)) return query_params @@ -273,8 +304,10 @@ class _BaseDeleteChannelConnection: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on 
@classmethod def _get_unset_required_fields(cls, message_dict): @@ -282,10 +315,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/channelConnections/*}", + }, ] return http_options @@ -297,10 +331,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields(query_params)) return query_params @@ -309,8 +345,11 @@ class _BaseDeleteTrigger: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly": False, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -318,10 +357,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}", + }, ] return http_options @@ -333,10 +373,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields(query_params)) return query_params @@ -345,8 +387,10 @@ class _BaseGetChannel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -354,10 +398,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/channels/*}", + }, ] return http_options @@ -369,10 +414,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) 
             query_params.update(_BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields(query_params))
             return query_params

@@ -381,8 +428,10 @@ class _BaseGetChannelConnection:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -390,10 +439,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/channelConnections/*}",
+                },
             ]
             return http_options
@@ -405,10 +455,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields(query_params))
             return query_params

@@ -417,8 +469,10 @@ class _BaseGetGoogleChannelConfig:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -426,10 +480,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/googleChannelConfig}",
+                },
             ]
             return http_options
@@ -441,10 +496,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields(query_params))
             return query_params

@@ -453,8 +510,10 @@ class _BaseGetProvider:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -462,10 +521,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/providers/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/providers/*}",
+                },
             ]
             return http_options
@@ -477,10 +537,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields(query_params))
             return query_params

@@ -489,8 +551,10 @@ class _BaseGetTrigger:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -498,10 +562,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/triggers/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/triggers/*}",
+                },
             ]
             return http_options
@@ -513,10 +578,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields(query_params))
             return query_params

@@ -525,8 +592,10 @@ class _BaseListChannelConnections:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -534,10 +603,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{parent=projects/*/locations/*}/channelConnections',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{parent=projects/*/locations/*}/channelConnections",
+                },
             ]
             return http_options
@@ -549,10 +619,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields(query_params))
             return query_params

@@ -561,8 +633,10 @@ class _BaseListChannels:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -570,10 +644,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{parent=projects/*/locations/*}/channels',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{parent=projects/*/locations/*}/channels",
+                },
             ]
             return http_options
@@ -585,10 +660,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields(query_params))
             return query_params

@@ -597,8 +674,10 @@ class _BaseListProviders:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -606,10 +685,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{parent=projects/*/locations/*}/providers',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{parent=projects/*/locations/*}/providers",
+                },
             ]
             return http_options
@@ -621,10 +701,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseListProviders._get_unset_required_fields(query_params))
             return query_params

@@ -633,8 +715,10 @@ class _BaseListTriggers:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -642,10 +726,11 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{parent=projects/*/locations/*}/triggers',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{parent=projects/*/locations/*}/triggers",
+                },
             ]
             return http_options
@@ -657,10 +742,12 @@ def _get_transcoded_request(http_options, request):
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseListTriggers._get_unset_required_fields(query_params))
             return query_params

@@ -669,8 +756,11 @@ class _BaseUpdateChannel:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-            "validateOnly" : False, }
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "validateOnly": False,
+        }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -678,11 +768,12 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'patch',
-                'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}',
-                'body': 'channel',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "patch",
+                    "uri": "/v1/{channel.name=projects/*/locations/*/channels/*}",
+                    "body": "channel",
+                },
             ]
             return http_options
@@ -697,16 +788,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body
             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseUpdateChannel._get_unset_required_fields(query_params))
             return query_params

@@ -715,8 +809,10 @@ class _BaseUpdateGoogleChannelConfig:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -724,11 +820,12 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'patch',
-                'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}',
-                'body': 'google_channel_config',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "patch",
+                    "uri": "/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}",
+                    "body": "google_channel_config",
+                },
             ]
             return http_options
@@ -743,16 +840,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body
             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_unset_required_fields(query_params))
             return query_params

@@ -761,8 +861,11 @@ class _BaseUpdateTrigger:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
-            "validateOnly" : False, }
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "validateOnly": False,
+        }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -770,11 +873,12 @@ def _get_unset_required_fields(cls, message_dict):
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'patch',
-                'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}',
-                'body': 'trigger',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "patch",
+                    "uri": "/v1/{trigger.name=projects/*/locations/*/triggers/*}",
+                    "body": "trigger",
+                },
             ]
             return http_options
@@ -789,16 +893,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body
             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
            )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseEventarcRestTransport._BaseUpdateTrigger._get_unset_required_fields(query_params))
             return query_params

@@ -809,10 +916,11 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*}",
+                },
             ]
             return http_options
@@ -820,12 +928,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseListLocations:
@@ -834,10 +944,11 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*}/locations',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*}/locations",
+                },
             ]
             return http_options
@@ -845,12 +956,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseGetIamPolicy:
@@ -859,18 +972,19 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy',
-            },
-            {
-                'method': 'get',
-                'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy',
-            },
-            {
-                'method': 'get',
-                'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy",
+                },
+                {
+                    "method": "get",
+                    "uri": "/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy",
+                },
+                {
+                    "method": "get",
+                    "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy",
+                },
             ]
             return http_options
@@ -878,12 +992,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseSetIamPolicy:
@@ -892,21 +1008,22 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy',
-                'body': '*',
-            },
-            {
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy',
-                'body': '*',
-            },
-            {
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy",
+                    "body": "*",
+                },
             ]
             return http_options
@@ -914,16 +1031,19 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_request_body_json(transcoded_request):
-            body = json.dumps(transcoded_request['body'])
+            body = json.dumps(transcoded_request["body"])
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseTestIamPermissions:
@@ -932,21 +1052,22 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions',
-                'body': '*',
-            },
-            {
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions',
-                'body': '*',
-            },
-            {
-                'method': 'post',
-                'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions",
+                    "body": "*",
+                },
+                {
+                    "method": "post",
+                    "uri": "/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions",
+                    "body": "*",
+                },
             ]
             return http_options
@@ -954,16 +1075,19 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_request_body_json(transcoded_request):
-            body = json.dumps(transcoded_request['body'])
+            body = json.dumps(transcoded_request["body"])
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseCancelOperation:
@@ -972,11 +1096,12 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                    "body": "*",
+                },
             ]
             return http_options
@@ -984,16 +1109,19 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_request_body_json(transcoded_request):
-            body = json.dumps(transcoded_request['body'])
+            body = json.dumps(transcoded_request["body"])
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseDeleteOperation:
@@ -1002,10 +1130,11 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'delete',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "delete",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                },
             ]
             return http_options
@@ -1013,12 +1142,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseGetOperation:
@@ -1027,10 +1158,11 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                },
             ]
             return http_options
@@ -1038,12 +1170,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseListOperations:
@@ -1052,10 +1186,11 @@ def __hash__(self):  # pragma: NO COVER
         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*}/operations',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*}/operations",
+                },
             ]
             return http_options
@@ -1063,15 +1198,15 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

-__all__=(
-    '_BaseEventarcRestTransport',
-)
+__all__ = ("_BaseEventarcRestTransport",)
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py
index 0521e2f2f2..9ee50b117e 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py
@@ -64,41 +64,41 @@
 )

 __all__ = (
-    'Channel',
-    'ChannelConnection',
-    'EventType',
-    'FilteringAttribute',
-    'Provider',
-    'CreateChannelConnectionRequest',
-    'CreateChannelRequest',
-    'CreateTriggerRequest',
-    'DeleteChannelConnectionRequest',
-    'DeleteChannelRequest',
-    'DeleteTriggerRequest',
-    'GetChannelConnectionRequest',
-    'GetChannelRequest',
-    'GetGoogleChannelConfigRequest',
-    'GetProviderRequest',
-    'GetTriggerRequest',
-    'ListChannelConnectionsRequest',
-    'ListChannelConnectionsResponse',
-    'ListChannelsRequest',
-    'ListChannelsResponse',
-    'ListProvidersRequest',
-    'ListProvidersResponse',
-    'ListTriggersRequest',
-    'ListTriggersResponse',
-    'OperationMetadata',
-    'UpdateChannelRequest',
-    'UpdateGoogleChannelConfigRequest',
-    'UpdateTriggerRequest',
-    'GoogleChannelConfig',
-    'CloudRun',
-    'Destination',
-    'EventFilter',
-    'GKE',
-    'Pubsub',
-    'StateCondition',
-    'Transport',
-    'Trigger',
+    "Channel",
+    "ChannelConnection",
+    "EventType",
+    "FilteringAttribute",
+    "Provider",
+    "CreateChannelConnectionRequest",
+    "CreateChannelRequest",
+    "CreateTriggerRequest",
+    "DeleteChannelConnectionRequest",
+    "DeleteChannelRequest",
+    "DeleteTriggerRequest",
+    "GetChannelConnectionRequest",
+    "GetChannelRequest",
+    "GetGoogleChannelConfigRequest",
+    "GetProviderRequest",
+    "GetTriggerRequest",
+    "ListChannelConnectionsRequest",
+    "ListChannelConnectionsResponse",
+    "ListChannelsRequest",
+    "ListChannelsResponse",
+    "ListProvidersRequest",
+    "ListProvidersResponse",
+    "ListTriggersRequest",
+    "ListTriggersResponse",
+    "OperationMetadata",
+    "UpdateChannelRequest",
+    "UpdateGoogleChannelConfigRequest",
+    "UpdateTriggerRequest",
+    "GoogleChannelConfig",
+    "CloudRun",
+    "Destination",
+    "EventFilter",
+    "GKE",
+    "Pubsub",
+    "StateCondition",
+    "Transport",
+    "Trigger",
 )
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py
index 0c3d920b1a..ae83b9d9fc 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py
@@ -22,12 +22,14 @@
 from google.protobuf import timestamp_pb2  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'Channel',
+        "Channel",
     },
 )
+# fmt: on

 class Channel(proto.Message):
@@ -79,6 +81,7 @@ class Channel(proto.Message):
             It must match the pattern
             ``projects/*/locations/*/keyRings/*/cryptoKeys/*``.
     """
+
     class State(proto.Enum):
         r"""State lists all the possible states of a Channel
@@ -140,7 +143,7 @@ class State(proto.Enum):
     pubsub_topic: str = proto.Field(
         proto.STRING,
         number=8,
-        oneof='transport',
+        oneof="transport",
     )
     state: State = proto.Field(
         proto.ENUM,
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py
index 5279252f19..cbd98b849b 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py
@@ -22,12 +22,14 @@
 from google.protobuf import timestamp_pb2  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'ChannelConnection',
+        "ChannelConnection",
     },
 )
+# fmt: on

 class ChannelConnection(proto.Message):
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py
index 430bae6e41..0848961a88 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py
@@ -20,14 +20,16 @@
 import proto  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'Provider',
-        'EventType',
-        'FilteringAttribute',
+        "Provider",
+        "EventType",
+        "FilteringAttribute",
     },
 )
+# fmt: on

 class Provider(proto.Message):
@@ -53,10 +55,10 @@ class Provider(proto.Message):
         proto.STRING,
         number=2,
     )
-    event_types: MutableSequence['EventType'] = proto.RepeatedField(
+    event_types: MutableSequence["EventType"] = proto.RepeatedField(
         proto.MESSAGE,
         number=3,
-        message='EventType',
+        message="EventType",
     )

@@ -95,10 +97,10 @@ class EventType(proto.Message):
         proto.STRING,
         number=2,
     )
-    filtering_attributes: MutableSequence['FilteringAttribute'] = proto.RepeatedField(
+    filtering_attributes: MutableSequence["FilteringAttribute"] = proto.RepeatedField(
         proto.MESSAGE,
         number=3,
-        message='FilteringAttribute',
+        message="FilteringAttribute",
     )
     event_schema_uri: str = proto.Field(
         proto.STRING,
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py
index 4f1d3c88bf..dba4477d25 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py
@@ -28,34 +28,36 @@
 from google.protobuf import timestamp_pb2  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'GetTriggerRequest',
-        'ListTriggersRequest',
-        'ListTriggersResponse',
-        'CreateTriggerRequest',
-        'UpdateTriggerRequest',
-        'DeleteTriggerRequest',
-        'GetChannelRequest',
-        'ListChannelsRequest',
-        'ListChannelsResponse',
-        'CreateChannelRequest',
-        'UpdateChannelRequest',
-        'DeleteChannelRequest',
-        'GetProviderRequest',
-        'ListProvidersRequest',
-        'ListProvidersResponse',
-        'GetChannelConnectionRequest',
-        'ListChannelConnectionsRequest',
-        'ListChannelConnectionsResponse',
-        'CreateChannelConnectionRequest',
-        'DeleteChannelConnectionRequest',
-        'UpdateGoogleChannelConfigRequest',
-        'GetGoogleChannelConfigRequest',
-        'OperationMetadata',
+        "GetTriggerRequest",
+        "ListTriggersRequest",
+        "ListTriggersResponse",
+        "CreateTriggerRequest",
+        "UpdateTriggerRequest",
+        "DeleteTriggerRequest",
+        "GetChannelRequest",
+        "ListChannelsRequest",
+        "ListChannelsResponse",
+        "CreateChannelRequest",
+        "UpdateChannelRequest",
+        "DeleteChannelRequest",
+        "GetProviderRequest",
+        "ListProvidersRequest",
+        "ListProvidersResponse",
+        "GetChannelConnectionRequest",
+        "ListChannelConnectionsRequest",
+        "ListChannelConnectionsResponse",
+        "CreateChannelConnectionRequest",
+        "DeleteChannelConnectionRequest",
+        "UpdateGoogleChannelConfigRequest",
+        "GetGoogleChannelConfigRequest",
+        "OperationMetadata",
     },
 )
+# fmt: on

 class GetTriggerRequest(proto.Message):
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py
index 2e17f40ae1..e52270ea27 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py
@@ -22,12 +22,14 @@
 from google.protobuf import timestamp_pb2  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'GoogleChannelConfig',
+        "GoogleChannelConfig",
     },
 )
+# fmt: on

 class GoogleChannelConfig(proto.Message):
diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py
index 83174e8cce..f8f7b280ef 100755
--- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py
+++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py
@@ -23,19 +23,21 @@
 from google.rpc import code_pb2  # type: ignore

+# fmt: off
 __protobuf__ = proto.module(
-    package='google.cloud.eventarc.v1',
+    package="google.cloud.eventarc.v1",
     manifest={
-        'Trigger',
-        'EventFilter',
-        'StateCondition',
-        'Destination',
-        'Transport',
-        'CloudRun',
-        'GKE',
-        'Pubsub',
+        "Trigger",
+        "EventFilter",
+        "StateCondition",
+        "Destination",
+        "Transport",
+        "CloudRun",
+        "GKE",
+        "Pubsub",
     },
 )
+# fmt: on

 class Trigger(proto.Message):
@@ -126,24 +128,24 @@ class Trigger(proto.Message):
         number=6,
         message=timestamp_pb2.Timestamp,
     )
-    event_filters: MutableSequence['EventFilter'] = proto.RepeatedField(
+    event_filters: MutableSequence["EventFilter"] = proto.RepeatedField(
         proto.MESSAGE,
         number=8,
-        message='EventFilter',
+        message="EventFilter",
     )
     service_account: str = proto.Field(
         proto.STRING,
         number=9,
     )
-    destination: 'Destination' = proto.Field(
+    destination: "Destination" = proto.Field(
         proto.MESSAGE,
         number=10,
-        message='Destination',
+        message="Destination",
     )
-    transport: 'Transport' = proto.Field(
+    transport: "Transport" = proto.Field(
         proto.MESSAGE,
         number=11,
-        message='Transport',
+        message="Transport",
     )
     labels: MutableMapping[str, str] = proto.MapField(
         proto.STRING,
@@ -154,11 +156,11 @@ class Trigger(proto.Message):
         proto.STRING,
         number=13,
     )
-    conditions: MutableMapping[str, 'StateCondition'] = proto.MapField(
+    conditions: MutableMapping[str, "StateCondition"] = proto.MapField(
         proto.STRING,
         proto.MESSAGE,
         number=15,
-        message='StateCondition',
+        message="StateCondition",
     )
     etag: str = proto.Field(
         proto.STRING,
@@ -260,27 +262,27 @@ class Destination(proto.Message):
             This field is a member of `oneof`_ ``descriptor``.
     """

-    cloud_run: 'CloudRun' = proto.Field(
+    cloud_run: "CloudRun" = proto.Field(
         proto.MESSAGE,
         number=1,
-        oneof='descriptor',
-        message='CloudRun',
+        oneof="descriptor",
+        message="CloudRun",
     )
     cloud_function: str = proto.Field(
         proto.STRING,
         number=2,
-        oneof='descriptor',
+        oneof="descriptor",
     )
-    gke: 'GKE' = proto.Field(
+    gke: "GKE" = proto.Field(
         proto.MESSAGE,
         number=3,
-        oneof='descriptor',
-        message='GKE',
+        oneof="descriptor",
+        message="GKE",
     )
     workflow: str = proto.Field(
         proto.STRING,
         number=4,
-        oneof='descriptor',
+        oneof="descriptor",
     )

@@ -299,11 +301,11 @@ class Transport(proto.Message):
             This field is a member of `oneof`_ ``intermediary``.
     """

-    pubsub: 'Pubsub' = proto.Field(
+    pubsub: "Pubsub" = proto.Field(
         proto.MESSAGE,
         number=1,
-        oneof='intermediary',
-        message='Pubsub',
+        oneof="intermediary",
+        message="Pubsub",
     )

diff --git a/tests/integration/goldens/eventarc/noxfile.py b/tests/integration/goldens/eventarc/noxfile.py
index 8aa9dd8a39..cae92ce697 100755
--- a/tests/integration/goldens/eventarc/noxfile.py
+++ b/tests/integration/goldens/eventarc/noxfile.py
@@ -26,7 +26,7 @@
 BLACK_VERSION = "black[jupyter]==23.7.0"
 ISORT_VERSION = "isort==5.11.0"

-FORMAT_PATHS = ["google", "tests"]
+FORMAT_PATHS = ["tests"]
 LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]

 # We're most interested in ensuring that code is formatted properly
diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py
index 88e728b9ca..1af275c587 100755
--- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py
+++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py
@@ -995,11 +995,11 @@ def test_get_trigger(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = trigger.Trigger(
-            name='name_value',
-            uid='uid_value',
-            service_account='service_account_value',
-            channel='channel_value',
-            etag='etag_value',
+            name="name_value",
+            uid="uid_value",
+            service_account="service_account_value",
+            channel="channel_value",
+            etag="etag_value",
         )
         response = client.get_trigger(request)

@@ -1011,11 +1011,11 @@
     # Establish that the response is the type that we expect.
     assert isinstance(response, trigger.Trigger)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.service_account == 'service_account_value'
-    assert response.channel == 'channel_value'
-    assert response.etag == 'etag_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.service_account == "service_account_value"
+    assert response.channel == "channel_value"
+    assert response.etag == "etag_value"

 def test_get_trigger_non_empty_request_with_auto_populated_field():
@@ -1030,7 +1030,7 @@ def test_get_trigger_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.GetTriggerRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1042,7 +1042,7 @@ def test_get_trigger_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.GetTriggerRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_get_trigger_use_cached_wrapped_rpc():
@@ -1128,11 +1128,11 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger(
-            name='name_value',
-            uid='uid_value',
-            service_account='service_account_value',
-            channel='channel_value',
-            etag='etag_value',
+            name="name_value",
+            uid="uid_value",
+            service_account="service_account_value",
+            channel="channel_value",
+            etag="etag_value",
         ))
         response = await client.get_trigger(request)

@@ -1144,11 +1144,11 @@
     # Establish that the response is the type that we expect.
     assert isinstance(response, trigger.Trigger)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.service_account == 'service_account_value'
-    assert response.channel == 'channel_value'
-    assert response.etag == 'etag_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.service_account == "service_account_value"
+    assert response.channel == "channel_value"
+    assert response.etag == "etag_value"

 @pytest.mark.asyncio
@@ -1164,7 +1164,7 @@ def test_get_trigger_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetTriggerRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1196,7 +1196,7 @@ async def test_get_trigger_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetTriggerRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1232,7 +1232,7 @@ def test_get_trigger_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_trigger(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1240,7 +1240,7 @@
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val

@@ -1254,7 +1254,7 @@ def test_get_trigger_flattened_error():
     with pytest.raises(ValueError):
         client.get_trigger(
             eventarc.GetTriggerRequest(),
-            name='name_value',
+            name="name_value",
         )

 @pytest.mark.asyncio
@@ -1274,7 +1274,7 @@ async def test_get_trigger_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_trigger(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1282,7 +1282,7 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -1296,7 +1296,7 @@ async def test_get_trigger_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_trigger(
             eventarc.GetTriggerRequest(),
-            name='name_value',
+            name="name_value",
         )

@@ -1320,8 +1320,8 @@ def test_list_triggers(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = eventarc.ListTriggersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )
         response = client.list_triggers(request)

@@ -1333,8 +1333,8 @@
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListTriggersPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]

 def test_list_triggers_non_empty_request_with_auto_populated_field():
@@ -1349,10 +1349,10 @@ def test_list_triggers_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.ListTriggersRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        order_by='order_by_value',
-        filter='filter_value',
+        parent="parent_value",
+        page_token="page_token_value",
+        order_by="order_by_value",
+        filter="filter_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1364,10 +1364,10 @@
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.ListTriggersRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            order_by='order_by_value',
-            filter='filter_value',
+            parent="parent_value",
+            page_token="page_token_value",
+            order_by="order_by_value",
+            filter="filter_value",
         )

 def test_list_triggers_use_cached_wrapped_rpc():
@@ -1453,8 +1453,8 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         response = await client.list_triggers(request)

@@ -1466,8 +1466,8 @@
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListTriggersAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]

 @pytest.mark.asyncio
@@ -1483,7 +1483,7 @@ def test_list_triggers_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListTriggersRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1515,7 +1515,7 @@ async def test_list_triggers_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListTriggersRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1551,7 +1551,7 @@ def test_list_triggers_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_triggers(
-            parent='parent_value',
+            parent="parent_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1559,7 +1559,7 @@
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val

@@ -1573,7 +1573,7 @@ def test_list_triggers_flattened_error():
     with pytest.raises(ValueError):
         client.list_triggers(
             eventarc.ListTriggersRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )

 @pytest.mark.asyncio
@@ -1593,7 +1593,7 @@ async def test_list_triggers_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_triggers(
-            parent='parent_value',
+            parent="parent_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1601,7 +1601,7 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -1615,7 +1615,7 @@ async def test_list_triggers_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_triggers(
             eventarc.ListTriggersRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )

@@ -1859,8 +1859,8 @@ def test_create_trigger_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.CreateTriggerRequest(
-        parent='parent_value',
-        trigger_id='trigger_id_value',
+        parent="parent_value",
+        trigger_id="trigger_id_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1872,8 +1872,8 @@
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.CreateTriggerRequest(
-            parent='parent_value',
-            trigger_id='trigger_id_value',
+            parent="parent_value",
+            trigger_id="trigger_id_value",
         )

 def test_create_trigger_use_cached_wrapped_rpc():
@@ -1996,7 +1996,7 @@ def test_create_trigger_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateTriggerRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2028,7 +2028,7 @@ async def test_create_trigger_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateTriggerRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2064,9 +2064,9 @@ def test_create_trigger_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_trigger(
-            parent='parent_value',
-            trigger=gce_trigger.Trigger(name='name_value'),
-            trigger_id='trigger_id_value',
+            parent="parent_value",
+            trigger=gce_trigger.Trigger(name="name_value"),
+            trigger_id="trigger_id_value",
         )

         # Establish that the underlying call was made with the expected
@@ -2074,13 +2074,13 @@
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].trigger
-        mock_val = gce_trigger.Trigger(name='name_value')
+        mock_val = gce_trigger.Trigger(name="name_value")
         assert arg == mock_val
         arg = args[0].trigger_id
-        mock_val = 'trigger_id_value'
+        mock_val = "trigger_id_value"
         assert arg == mock_val

@@ -2094,9 +2094,9 @@ def test_create_trigger_flattened_error():
     with pytest.raises(ValueError):
         client.create_trigger(
             eventarc.CreateTriggerRequest(),
-            parent='parent_value',
-            trigger=gce_trigger.Trigger(name='name_value'),
-            trigger_id='trigger_id_value',
+            parent="parent_value",
+            trigger=gce_trigger.Trigger(name="name_value"),
+            trigger_id="trigger_id_value",
         )

 @pytest.mark.asyncio
@@ -2118,9 +2118,9 @@ async def test_create_trigger_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.create_trigger(
-            parent='parent_value',
-            trigger=gce_trigger.Trigger(name='name_value'),
-            trigger_id='trigger_id_value',
+            parent="parent_value",
+            trigger=gce_trigger.Trigger(name="name_value"),
+            trigger_id="trigger_id_value",
        )

         # Establish that the underlying call was made with the expected
@@ -2128,13 +2128,13 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].trigger
-        mock_val = gce_trigger.Trigger(name='name_value')
+        mock_val = gce_trigger.Trigger(name="name_value")
         assert arg == mock_val
         arg = args[0].trigger_id
-        mock_val = 'trigger_id_value'
+        mock_val = "trigger_id_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -2148,9 +2148,9 @@ async def test_create_trigger_flattened_error_async():
     with pytest.raises(ValueError):
         await client.create_trigger(
             eventarc.CreateTriggerRequest(),
-            parent='parent_value',
-            trigger=gce_trigger.Trigger(name='name_value'),
-            trigger_id='trigger_id_value',
+            parent="parent_value",
+            trigger=gce_trigger.Trigger(name="name_value"),
+            trigger_id="trigger_id_value",
         )

@@ -2331,7 +2331,7 @@ def test_update_trigger_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateTriggerRequest()

-    request.trigger.name = 'name_value'
+    request.trigger.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2363,7 +2363,7 @@ async def test_update_trigger_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateTriggerRequest()

-    request.trigger.name = 'name_value'
+    request.trigger.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2399,8 +2399,8 @@ def test_update_trigger_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.update_trigger(
-            trigger=gce_trigger.Trigger(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            trigger=gce_trigger.Trigger(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
             allow_missing=True,
         )

@@ -2409,10 +2409,10 @@
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].trigger
-        mock_val = gce_trigger.Trigger(name='name_value')
+        mock_val = gce_trigger.Trigger(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
         arg = args[0].allow_missing
         mock_val = True
@@ -2429,8 +2429,8 @@ def test_update_trigger_flattened_error():
     with pytest.raises(ValueError):
         client.update_trigger(
             eventarc.UpdateTriggerRequest(),
-            trigger=gce_trigger.Trigger(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            trigger=gce_trigger.Trigger(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
             allow_missing=True,
         )

@@ -2453,8 +2453,8 @@ async def test_update_trigger_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.update_trigger(
-            trigger=gce_trigger.Trigger(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            trigger=gce_trigger.Trigger(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
             allow_missing=True,
         )

@@ -2463,10 +2463,10 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].trigger
-        mock_val = gce_trigger.Trigger(name='name_value')
+        mock_val = gce_trigger.Trigger(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
         arg = args[0].allow_missing
         mock_val = True
@@ -2483,8 +2483,8 @@ async def test_update_trigger_flattened_error_async():
     with pytest.raises(ValueError):
         await client.update_trigger(
             eventarc.UpdateTriggerRequest(),
-            trigger=gce_trigger.Trigger(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            trigger=gce_trigger.Trigger(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
             allow_missing=True,
         )

@@ -2533,8 +2533,8 @@ def test_delete_trigger_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.DeleteTriggerRequest(
-        name='name_value',
-        etag='etag_value',
+        name="name_value",
+        etag="etag_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2546,8 +2546,8 @@
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.DeleteTriggerRequest(
-            name='name_value',
-            etag='etag_value',
+            name="name_value",
+            etag="etag_value",
         )

 def test_delete_trigger_use_cached_wrapped_rpc():
@@ -2670,7 +2670,7 @@ def test_delete_trigger_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteTriggerRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2702,7 +2702,7 @@ async def test_delete_trigger_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteTriggerRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2738,7 +2738,7 @@ def test_delete_trigger_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_trigger(
-            name='name_value',
+            name="name_value",
             allow_missing=True,
         )

@@ -2747,7 +2747,7 @@
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].allow_missing
         mock_val = True
@@ -2764,7 +2764,7 @@ def test_delete_trigger_flattened_error():
     with pytest.raises(ValueError):
         client.delete_trigger(
             eventarc.DeleteTriggerRequest(),
-            name='name_value',
+            name="name_value",
             allow_missing=True,
         )

@@ -2787,7 +2787,7 @@ async def test_delete_trigger_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.delete_trigger(
-            name='name_value',
+            name="name_value",
             allow_missing=True,
         )

@@ -2796,7 +2796,7 @@
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].allow_missing
         mock_val = True
@@ -2813,7 +2813,7 @@ async def test_delete_trigger_flattened_error_async():
     with pytest.raises(ValueError):
         await client.delete_trigger(
             eventarc.DeleteTriggerRequest(),
-            name='name_value',
+            name="name_value",
             allow_missing=True,
         )

@@ -2838,13 +2838,13 @@ def test_get_channel(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = channel.Channel(
-            name='name_value',
-            uid='uid_value',
-            provider='provider_value',
+            name="name_value",
+            uid="uid_value",
+            provider="provider_value",
             state=channel.Channel.State.PENDING,
-            activation_token='activation_token_value',
-            crypto_key_name='crypto_key_name_value',
-            pubsub_topic='pubsub_topic_value',
+            activation_token="activation_token_value",
+            crypto_key_name="crypto_key_name_value",
+            pubsub_topic="pubsub_topic_value",
         )
         response = client.get_channel(request)

@@ -2856,12 +2856,12 @@
     # Establish that the response is the type that we expect.
     assert isinstance(response, channel.Channel)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.provider == 'provider_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.provider == "provider_value"
     assert response.state == channel.Channel.State.PENDING
-    assert response.activation_token == 'activation_token_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.activation_token == "activation_token_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 def test_get_channel_non_empty_request_with_auto_populated_field():
@@ -2876,7 +2876,7 @@ def test_get_channel_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.GetChannelRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2888,7 +2888,7 @@ def test_get_channel_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.GetChannelRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_get_channel_use_cached_wrapped_rpc():
@@ -2974,12 +2974,12 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel(
-            name='name_value',
-            uid='uid_value',
-            provider='provider_value',
+            name="name_value",
+            uid="uid_value",
+            provider="provider_value",
             state=channel.Channel.State.PENDING,
-            activation_token='activation_token_value',
-            crypto_key_name='crypto_key_name_value',
+            activation_token="activation_token_value",
+            crypto_key_name="crypto_key_name_value",
         ))
         response = await client.get_channel(request)
@@ -2991,12 +2991,12 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e
     # Establish that the response is the type that we expect.
     assert isinstance(response, channel.Channel)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.provider == 'provider_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.provider == "provider_value"
     assert response.state == channel.Channel.State.PENDING
-    assert response.activation_token == 'activation_token_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.activation_token == "activation_token_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 @pytest.mark.asyncio
@@ -3012,7 +3012,7 @@ def test_get_channel_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetChannelRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3044,7 +3044,7 @@ async def test_get_channel_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetChannelRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3080,7 +3080,7 @@ def test_get_channel_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_channel(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -3088,7 +3088,7 @@ def test_get_channel_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -3102,7 +3102,7 @@ def test_get_channel_flattened_error():
     with pytest.raises(ValueError):
         client.get_channel(
             eventarc.GetChannelRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -3122,7 +3122,7 @@ async def test_get_channel_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_channel(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -3130,7 +3130,7 @@ async def test_get_channel_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -3144,7 +3144,7 @@ async def test_get_channel_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_channel(
             eventarc.GetChannelRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -3168,8 +3168,8 @@ def test_list_channels(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = eventarc.ListChannelsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )
         response = client.list_channels(request)
@@ -3181,8 +3181,8 @@ def test_list_channels(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListChannelsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 def test_list_channels_non_empty_request_with_auto_populated_field():
@@ -3197,9 +3197,9 @@ def test_list_channels_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.ListChannelsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        order_by='order_by_value',
+        parent="parent_value",
+        page_token="page_token_value",
+        order_by="order_by_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3211,9 +3211,9 @@ def test_list_channels_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.ListChannelsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            order_by='order_by_value',
+            parent="parent_value",
+            page_token="page_token_value",
+            order_by="order_by_value",
         )
 
 def test_list_channels_use_cached_wrapped_rpc():
@@ -3299,8 +3299,8 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         response = await client.list_channels(request)
@@ -3312,8 +3312,8 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListChannelsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 @pytest.mark.asyncio
@@ -3329,7 +3329,7 @@ def test_list_channels_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListChannelsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3361,7 +3361,7 @@ async def test_list_channels_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListChannelsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3397,7 +3397,7 @@ def test_list_channels_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_channels(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -3405,7 +3405,7 @@ def test_list_channels_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -3419,7 +3419,7 @@ def test_list_channels_flattened_error():
     with pytest.raises(ValueError):
         client.list_channels(
             eventarc.ListChannelsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -3439,7 +3439,7 @@ async def test_list_channels_flattened_async():
         # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_channels(
-            parent='parent_value',
+            parent="parent_value",
        )
 
         # Establish that the underlying call was made with the expected
@@ -3447,7 +3447,7 @@ async def test_list_channels_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -3461,7 +3461,7 @@ async def test_list_channels_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_channels(
             eventarc.ListChannelsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
@@ -3705,8 +3705,8 @@ def test_create_channel_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.CreateChannelRequest(
-        parent='parent_value',
-        channel_id='channel_id_value',
+        parent="parent_value",
+        channel_id="channel_id_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3718,8 +3718,8 @@ def test_create_channel_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.CreateChannelRequest(
-            parent='parent_value',
-            channel_id='channel_id_value',
+            parent="parent_value",
+            channel_id="channel_id_value",
         )
 
 def test_create_channel_use_cached_wrapped_rpc():
@@ -3842,7 +3842,7 @@ def test_create_channel_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateChannelRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3874,7 +3874,7 @@ async def test_create_channel_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateChannelRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3910,9 +3910,9 @@ def test_create_channel_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_channel(
-            parent='parent_value',
-            channel=gce_channel.Channel(name='name_value'),
-            channel_id='channel_id_value',
+            parent="parent_value",
+            channel=gce_channel.Channel(name="name_value"),
+            channel_id="channel_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -3920,13 +3920,13 @@ def test_create_channel_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].channel
-        mock_val = gce_channel.Channel(name='name_value')
+        mock_val = gce_channel.Channel(name="name_value")
         assert arg == mock_val
         arg = args[0].channel_id
-        mock_val = 'channel_id_value'
+        mock_val = "channel_id_value"
         assert arg == mock_val
 
 
@@ -3940,9 +3940,9 @@ def test_create_channel_flattened_error():
     with pytest.raises(ValueError):
         client.create_channel(
             eventarc.CreateChannelRequest(),
-            parent='parent_value',
-            channel=gce_channel.Channel(name='name_value'),
-            channel_id='channel_id_value',
+            parent="parent_value",
+            channel=gce_channel.Channel(name="name_value"),
+            channel_id="channel_id_value",
         )
 
 @pytest.mark.asyncio
@@ -3964,9 +3964,9 @@ async def test_create_channel_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.create_channel(
-            parent='parent_value',
-            channel=gce_channel.Channel(name='name_value'),
-            channel_id='channel_id_value',
+            parent="parent_value",
+            channel=gce_channel.Channel(name="name_value"),
+            channel_id="channel_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -3974,13 +3974,13 @@ async def test_create_channel_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].channel
-        mock_val = gce_channel.Channel(name='name_value')
+        mock_val = gce_channel.Channel(name="name_value")
         assert arg == mock_val
         arg = args[0].channel_id
-        mock_val = 'channel_id_value'
+        mock_val = "channel_id_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -3994,9 +3994,9 @@ async def test_create_channel_flattened_error_async():
     with pytest.raises(ValueError):
         await client.create_channel(
             eventarc.CreateChannelRequest(),
-            parent='parent_value',
-            channel=gce_channel.Channel(name='name_value'),
-            channel_id='channel_id_value',
+            parent="parent_value",
+            channel=gce_channel.Channel(name="name_value"),
+            channel_id="channel_id_value",
         )
@@ -4177,7 +4177,7 @@ def test_update_channel_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateChannelRequest()
 
-    request.channel.name = 'name_value'
+    request.channel.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4209,7 +4209,7 @@ async def test_update_channel_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateChannelRequest()
 
-    request.channel.name = 'name_value'
+    request.channel.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4245,8 +4245,8 @@ def test_update_channel_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.update_channel(
-            channel=gce_channel.Channel(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            channel=gce_channel.Channel(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -4254,10 +4254,10 @@ def test_update_channel_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].channel
-        mock_val = gce_channel.Channel(name='name_value')
+        mock_val = gce_channel.Channel(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 
@@ -4271,8 +4271,8 @@ def test_update_channel_flattened_error():
     with pytest.raises(ValueError):
         client.update_channel(
             eventarc.UpdateChannelRequest(),
-            channel=gce_channel.Channel(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            channel=gce_channel.Channel(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 @pytest.mark.asyncio
@@ -4294,8 +4294,8 @@ async def test_update_channel_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.update_channel(
-            channel=gce_channel.Channel(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            channel=gce_channel.Channel(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -4303,10 +4303,10 @@ async def test_update_channel_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].channel
-        mock_val = gce_channel.Channel(name='name_value')
+        mock_val = gce_channel.Channel(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -4320,8 +4320,8 @@ async def test_update_channel_flattened_error_async():
     with pytest.raises(ValueError):
         await client.update_channel(
             eventarc.UpdateChannelRequest(),
-            channel=gce_channel.Channel(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            channel=gce_channel.Channel(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
@@ -4369,7 +4369,7 @@ def test_delete_channel_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.DeleteChannelRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4381,7 +4381,7 @@ def test_delete_channel_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.DeleteChannelRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_delete_channel_use_cached_wrapped_rpc():
@@ -4504,7 +4504,7 @@ def test_delete_channel_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteChannelRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4536,7 +4536,7 @@ async def test_delete_channel_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteChannelRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4572,7 +4572,7 @@ def test_delete_channel_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_channel(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4580,7 +4580,7 @@ def test_delete_channel_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -4594,7 +4594,7 @@ def test_delete_channel_flattened_error():
     with pytest.raises(ValueError):
         client.delete_channel(
             eventarc.DeleteChannelRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -4616,7 +4616,7 @@ async def test_delete_channel_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.delete_channel(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4624,7 +4624,7 @@ async def test_delete_channel_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -4638,7 +4638,7 @@ async def test_delete_channel_flattened_error_async():
     with pytest.raises(ValueError):
         await client.delete_channel(
             eventarc.DeleteChannelRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -4662,8 +4662,8 @@ def test_get_provider(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = discovery.Provider(
-            name='name_value',
-            display_name='display_name_value',
+            name="name_value",
+            display_name="display_name_value",
         )
         response = client.get_provider(request)
@@ -4675,8 +4675,8 @@ def test_get_provider(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, discovery.Provider)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
 
 
 def test_get_provider_non_empty_request_with_auto_populated_field():
@@ -4691,7 +4691,7 @@ def test_get_provider_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.GetProviderRequest(
-        name='name_value',
+        name="name_value",
    )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4703,7 +4703,7 @@ def test_get_provider_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.GetProviderRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_get_provider_use_cached_wrapped_rpc():
@@ -4789,8 +4789,8 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider(
-            name='name_value',
-            display_name='display_name_value',
+            name="name_value",
+            display_name="display_name_value",
         ))
         response = await client.get_provider(request)
@@ -4802,8 +4802,8 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=
     # Establish that the response is the type that we expect.
     assert isinstance(response, discovery.Provider)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
 
 
 @pytest.mark.asyncio
@@ -4819,7 +4819,7 @@ def test_get_provider_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetProviderRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4851,7 +4851,7 @@ async def test_get_provider_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetProviderRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4887,7 +4887,7 @@ def test_get_provider_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_provider(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4895,7 +4895,7 @@ def test_get_provider_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -4909,7 +4909,7 @@ def test_get_provider_flattened_error():
     with pytest.raises(ValueError):
         client.get_provider(
             eventarc.GetProviderRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -4929,7 +4929,7 @@ async def test_get_provider_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_provider(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4937,7 +4937,7 @@ async def test_get_provider_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -4951,7 +4951,7 @@ async def test_get_provider_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_provider(
             eventarc.GetProviderRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -4975,8 +4975,8 @@ def test_list_providers(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = eventarc.ListProvidersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )
         response = client.list_providers(request)
@@ -4988,8 +4988,8 @@ def test_list_providers(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListProvidersPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 def test_list_providers_non_empty_request_with_auto_populated_field():
@@ -5004,10 +5004,10 @@ def test_list_providers_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.ListProvidersRequest(
-        parent='parent_value',
-        page_token='page_token_value',
-        order_by='order_by_value',
-        filter='filter_value',
+        parent="parent_value",
+        page_token="page_token_value",
+        order_by="order_by_value",
+        filter="filter_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5019,10 +5019,10 @@ def test_list_providers_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.ListProvidersRequest(
-            parent='parent_value',
-            page_token='page_token_value',
-            order_by='order_by_value',
-            filter='filter_value',
+            parent="parent_value",
+            page_token="page_token_value",
+            order_by="order_by_value",
+            filter="filter_value",
         )
 
 def test_list_providers_use_cached_wrapped_rpc():
@@ -5108,8 +5108,8 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         response = await client.list_providers(request)
@@ -5121,8 +5121,8 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListProvidersAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 @pytest.mark.asyncio
@@ -5138,7 +5138,7 @@ def test_list_providers_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListProvidersRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5170,7 +5170,7 @@ async def test_list_providers_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListProvidersRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5206,7 +5206,7 @@ def test_list_providers_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_providers(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5214,7 +5214,7 @@ def test_list_providers_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -5228,7 +5228,7 @@ def test_list_providers_flattened_error():
     with pytest.raises(ValueError):
         client.list_providers(
             eventarc.ListProvidersRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -5248,7 +5248,7 @@ async def test_list_providers_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_providers(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5256,7 +5256,7 @@ async def test_list_providers_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -5270,7 +5270,7 @@ async def test_list_providers_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_providers(
             eventarc.ListProvidersRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
@@ -5490,10 +5490,10 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = channel_connection.ChannelConnection(
-            name='name_value',
-            uid='uid_value',
-            channel='channel_value',
-            activation_token='activation_token_value',
+            name="name_value",
+            uid="uid_value",
+            channel="channel_value",
+            activation_token="activation_token_value",
         )
         response = client.get_channel_connection(request)
@@ -5505,10 +5505,10 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, channel_connection.ChannelConnection)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.channel == 'channel_value'
-    assert response.activation_token == 'activation_token_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.channel == "channel_value"
+    assert response.activation_token == "activation_token_value"
 
 
 def test_get_channel_connection_non_empty_request_with_auto_populated_field():
@@ -5523,7 +5523,7 @@ def test_get_channel_connection_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.GetChannelConnectionRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5535,7 +5535,7 @@ def test_get_channel_connection_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.GetChannelConnectionRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_get_channel_connection_use_cached_wrapped_rpc():
@@ -5621,10 +5621,10 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection(
-            name='name_value',
-            uid='uid_value',
-            channel='channel_value',
-            activation_token='activation_token_value',
+            name="name_value",
+            uid="uid_value",
+            channel="channel_value",
+            activation_token="activation_token_value",
         ))
         response = await client.get_channel_connection(request)
@@ -5636,10 +5636,10 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req
     # Establish that the response is the type that we expect.
     assert isinstance(response, channel_connection.ChannelConnection)
-    assert response.name == 'name_value'
-    assert response.uid == 'uid_value'
-    assert response.channel == 'channel_value'
-    assert response.activation_token == 'activation_token_value'
+    assert response.name == "name_value"
+    assert response.uid == "uid_value"
+    assert response.channel == "channel_value"
+    assert response.activation_token == "activation_token_value"
 
 
 @pytest.mark.asyncio
@@ -5655,7 +5655,7 @@ def test_get_channel_connection_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetChannelConnectionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5687,7 +5687,7 @@ async def test_get_channel_connection_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetChannelConnectionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5723,7 +5723,7 @@ def test_get_channel_connection_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_channel_connection(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5731,7 +5731,7 @@ def test_get_channel_connection_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -5745,7 +5745,7 @@ def test_get_channel_connection_flattened_error():
     with pytest.raises(ValueError):
         client.get_channel_connection(
             eventarc.GetChannelConnectionRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -5765,7 +5765,7 @@ async def test_get_channel_connection_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_channel_connection(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5773,7 +5773,7 @@ async def test_get_channel_connection_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -5787,7 +5787,7 @@ async def test_get_channel_connection_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_channel_connection(
             eventarc.GetChannelConnectionRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -5811,8 +5811,8 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = eventarc.ListChannelConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )
         response = client.list_channel_connections(request)
@@ -5824,8 +5824,8 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListChannelConnectionsPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 def test_list_channel_connections_non_empty_request_with_auto_populated_field():
@@ -5840,8 +5840,8 @@ def test_list_channel_connections_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.ListChannelConnectionsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
+        parent="parent_value",
+        page_token="page_token_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5853,8 +5853,8 @@ def test_list_channel_connections_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.ListChannelConnectionsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
+            parent="parent_value",
+            page_token="page_token_value",
         )
 
 def test_list_channel_connections_use_cached_wrapped_rpc():
@@ -5940,8 +5940,8 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         response = await client.list_channel_connections(request)
@@ -5953,8 +5953,8 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListChannelConnectionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]
 
 
 @pytest.mark.asyncio
@@ -5970,7 +5970,7 @@ def test_list_channel_connections_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListChannelConnectionsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6002,7 +6002,7 @@ async def test_list_channel_connections_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.ListChannelConnectionsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6038,7 +6038,7 @@ def test_list_channel_connections_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_channel_connections(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6046,7 +6046,7 @@ def test_list_channel_connections_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -6060,7 +6060,7 @@ def test_list_channel_connections_flattened_error():
     with pytest.raises(ValueError):
         client.list_channel_connections(
             eventarc.ListChannelConnectionsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -6080,7 +6080,7 @@ async def test_list_channel_connections_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_channel_connections(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6088,7 +6088,7 @@ async def test_list_channel_connections_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6102,7 +6102,7 @@ async def test_list_channel_connections_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_channel_connections(
             eventarc.ListChannelConnectionsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
@@ -6346,8 +6346,8 @@ def test_create_channel_connection_non_empty_request_with_auto_populated_field()
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.CreateChannelConnectionRequest(
-        parent='parent_value',
-        channel_connection_id='channel_connection_id_value',
+        parent="parent_value",
+        channel_connection_id="channel_connection_id_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -6359,8 +6359,8 @@ def test_create_channel_connection_non_empty_request_with_auto_populated_field()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.CreateChannelConnectionRequest(
-            parent='parent_value',
-            channel_connection_id='channel_connection_id_value',
+            parent="parent_value",
+            channel_connection_id="channel_connection_id_value",
         )
 
 def test_create_channel_connection_use_cached_wrapped_rpc():
@@ -6483,7 +6483,7 @@ def test_create_channel_connection_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateChannelConnectionRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6515,7 +6515,7 @@ async def test_create_channel_connection_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.CreateChannelConnectionRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6551,9 +6551,9 @@ def test_create_channel_connection_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_channel_connection(
-            parent='parent_value',
-            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
-            channel_connection_id='channel_connection_id_value',
+            parent="parent_value",
+            channel_connection=gce_channel_connection.ChannelConnection(name="name_value"),
+            channel_connection_id="channel_connection_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6561,13 +6561,13 @@ def test_create_channel_connection_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].channel_connection
-        mock_val = gce_channel_connection.ChannelConnection(name='name_value')
+        mock_val = gce_channel_connection.ChannelConnection(name="name_value")
         assert arg == mock_val
         arg = args[0].channel_connection_id
-        mock_val = 'channel_connection_id_value'
+        mock_val = "channel_connection_id_value"
         assert arg == mock_val
 
 
@@ -6581,9 +6581,9 @@ def test_create_channel_connection_flattened_error():
     with pytest.raises(ValueError):
         client.create_channel_connection(
             eventarc.CreateChannelConnectionRequest(),
-            parent='parent_value',
-            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
-            channel_connection_id='channel_connection_id_value',
+            parent="parent_value",
+            channel_connection=gce_channel_connection.ChannelConnection(name="name_value"),
+            channel_connection_id="channel_connection_id_value",
         )
 
 @pytest.mark.asyncio
@@ -6605,9 +6605,9 @@ async def test_create_channel_connection_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.create_channel_connection(
-            parent='parent_value',
-            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
-            channel_connection_id='channel_connection_id_value',
+            parent="parent_value",
+            channel_connection=gce_channel_connection.ChannelConnection(name="name_value"),
+            channel_connection_id="channel_connection_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6615,13 +6615,13 @@ async def test_create_channel_connection_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].channel_connection
-        mock_val = gce_channel_connection.ChannelConnection(name='name_value')
+        mock_val = gce_channel_connection.ChannelConnection(name="name_value")
         assert arg == mock_val
         arg = args[0].channel_connection_id
-        mock_val = 'channel_connection_id_value'
+        mock_val = "channel_connection_id_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6635,9 +6635,9 @@ async def test_create_channel_connection_flattened_error_async():
     with pytest.raises(ValueError):
         await client.create_channel_connection(
             eventarc.CreateChannelConnectionRequest(),
-            parent='parent_value',
-            channel_connection=gce_channel_connection.ChannelConnection(name='name_value'),
-            channel_connection_id='channel_connection_id_value',
+            parent="parent_value",
+            channel_connection=gce_channel_connection.ChannelConnection(name="name_value"),
+            channel_connection_id="channel_connection_id_value",
         )
@@ -6685,7 +6685,7 @@ def test_delete_channel_connection_non_empty_request_with_auto_populated_field()
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.DeleteChannelConnectionRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -6697,7 +6697,7 @@ def test_delete_channel_connection_non_empty_request_with_auto_populated_field()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.DeleteChannelConnectionRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_delete_channel_connection_use_cached_wrapped_rpc():
@@ -6820,7 +6820,7 @@ def test_delete_channel_connection_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteChannelConnectionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6852,7 +6852,7 @@ async def test_delete_channel_connection_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.DeleteChannelConnectionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6888,7 +6888,7 @@ def test_delete_channel_connection_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_channel_connection(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6896,7 +6896,7 @@ def test_delete_channel_connection_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -6910,7 +6910,7 @@ def test_delete_channel_connection_flattened_error():
     with pytest.raises(ValueError):
         client.delete_channel_connection(
             eventarc.DeleteChannelConnectionRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -6932,7 +6932,7 @@ async def test_delete_channel_connection_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.delete_channel_connection(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6940,7 +6940,7 @@ async def test_delete_channel_connection_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6954,7 +6954,7 @@ async def test_delete_channel_connection_flattened_error_async():
     with pytest.raises(ValueError):
         await client.delete_channel_connection(
             eventarc.DeleteChannelConnectionRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -6978,8 +6978,8 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = google_channel_config.GoogleChannelConfig(
-            name='name_value',
-            crypto_key_name='crypto_key_name_value',
+            name="name_value",
+            crypto_key_name="crypto_key_name_value",
         )
         response = client.get_google_channel_config(request)
@@ -6991,8 +6991,8 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, google_channel_config.GoogleChannelConfig)
-    assert response.name == 'name_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.name == "name_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 def test_get_google_channel_config_non_empty_request_with_auto_populated_field():
@@ -7007,7 +7007,7 @@ def test_get_google_channel_config_non_empty_request_with_auto_populated_field()
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = eventarc.GetGoogleChannelConfigRequest(
-        name='name_value',
+        name="name_value",
    )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -7019,7 +7019,7 @@ def test_get_google_channel_config_non_empty_request_with_auto_populated_field()
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == eventarc.GetGoogleChannelConfigRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test_get_google_channel_config_use_cached_wrapped_rpc():
@@ -7105,8 +7105,8 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio',
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig(
-            name='name_value',
-            crypto_key_name='crypto_key_name_value',
+            name="name_value",
+            crypto_key_name="crypto_key_name_value",
         ))
         response = await client.get_google_channel_config(request)
@@ -7118,8 +7118,8 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio',
     # Establish that the response is the type that we expect.
     assert isinstance(response, google_channel_config.GoogleChannelConfig)
-    assert response.name == 'name_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.name == "name_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 @pytest.mark.asyncio
@@ -7135,7 +7135,7 @@ def test_get_google_channel_config_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetGoogleChannelConfigRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7167,7 +7167,7 @@ async def test_get_google_channel_config_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.GetGoogleChannelConfigRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7203,7 +7203,7 @@ def test_get_google_channel_config_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_google_channel_config(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7211,7 +7211,7 @@ def test_get_google_channel_config_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -7225,7 +7225,7 @@ def test_get_google_channel_config_flattened_error():
     with pytest.raises(ValueError):
         client.get_google_channel_config(
             eventarc.GetGoogleChannelConfigRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -7245,7 +7245,7 @@ async def test_get_google_channel_config_flattened_async():
         # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_google_channel_config(
-            name='name_value',
+            name="name_value",
        )
 
         # Establish that the underlying call was made with the expected
@@ -7253,7 +7253,7 @@ async def test_get_google_channel_config_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -7267,7 +7267,7 @@ async def test_get_google_channel_config_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_google_channel_config(
             eventarc.GetGoogleChannelConfigRequest(),
-            name='name_value',
+            name="name_value",
         )
@@ -7291,8 +7291,8 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = gce_google_channel_config.GoogleChannelConfig(
-            name='name_value',
-            crypto_key_name='crypto_key_name_value',
+            name="name_value",
+            crypto_key_name="crypto_key_name_value",
         )
         response = client.update_google_channel_config(request)
@@ -7304,8 +7304,8 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'):
     # Establish that the response is the type that we expect.
     assert isinstance(response, gce_google_channel_config.GoogleChannelConfig)
-    assert response.name == 'name_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.name == "name_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 def test_update_google_channel_config_non_empty_request_with_auto_populated_field():
@@ -7416,8 +7416,8 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig(
-            name='name_value',
-            crypto_key_name='crypto_key_name_value',
+            name="name_value",
+            crypto_key_name="crypto_key_name_value",
        ))
         response = await client.update_google_channel_config(request)
@@ -7429,8 +7429,8 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio
     # Establish that the response is the type that we expect.
     assert isinstance(response, gce_google_channel_config.GoogleChannelConfig)
-    assert response.name == 'name_value'
-    assert response.crypto_key_name == 'crypto_key_name_value'
+    assert response.name == "name_value"
+    assert response.crypto_key_name == "crypto_key_name_value"
 
 
 @pytest.mark.asyncio
@@ -7446,7 +7446,7 @@ def test_update_google_channel_config_field_headers():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateGoogleChannelConfigRequest()
 
-    request.google_channel_config.name = 'name_value'
+    request.google_channel_config.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7478,7 +7478,7 @@ async def test_update_google_channel_config_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = eventarc.UpdateGoogleChannelConfigRequest()
 
-    request.google_channel_config.name = 'name_value'
+    request.google_channel_config.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7514,8 +7514,8 @@ def test_update_google_channel_config_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.update_google_channel_config(
-            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -7523,10 +7523,10 @@ def test_update_google_channel_config_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].google_channel_config
-        mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value')
+        mock_val = gce_google_channel_config.GoogleChannelConfig(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 
@@ -7540,8 +7540,8 @@ def test_update_google_channel_config_flattened_error():
     with pytest.raises(ValueError):
         client.update_google_channel_config(
             eventarc.UpdateGoogleChannelConfigRequest(),
-            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 @pytest.mark.asyncio
@@ -7561,8 +7561,8 @@ async def test_update_google_channel_config_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
response = await client.update_google_channel_config( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -7570,10 +7570,10 @@ async def test_update_google_channel_config_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].google_channel_config - mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value') + mock_val = gce_google_channel_config.GoogleChannelConfig(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio @@ -7587,8 +7587,8 @@ async def test_update_google_channel_config_flattened_error_async(): with pytest.raises(ValueError): await client.update_google_channel_config( eventarc.UpdateGoogleChannelConfigRequest(), - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -7645,14 +7645,14 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7720,7 +7720,7 @@ def test_get_trigger_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -7754,7 +7754,7 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_trigger( eventarc.GetTriggerRequest(), - name='name_value', + name="name_value", ) @@ -7811,7 +7811,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -7820,7 +7820,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7888,7 +7888,7 @@ def test_list_triggers_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -7922,7 +7922,7 @@ def test_list_triggers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_triggers( eventarc.ListTriggersRequest(), - parent='parent_value', + parent="parent_value", ) @@ -8053,8 +8053,8 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["triggerId"] = 'trigger_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["triggerId"] = "trigger_id_value" jsonified_request["validateOnly"] = True unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) @@ -8064,9 +8064,9 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' + assert jsonified_request["triggerId"] == "trigger_id_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True @@ -8142,9 +8142,9 @@ def test_create_trigger_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) mock_args.update(sample_request) @@ -8176,9 +8176,9 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_trigger( eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + parent="parent_value", + trigger=gce_trigger.Trigger(name="name_value"), + trigger_id="trigger_id_value", ) @@ -8321,8 +8321,8 @@ def test_update_trigger_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) mock_args.update(sample_request) @@ -8355,8 +8355,8 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_trigger( eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + trigger=gce_trigger.Trigger(name="name_value"), + 
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), allow_missing=True, ) @@ -8422,7 +8422,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" jsonified_request["validateOnly"] = True unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) @@ -8432,7 +8432,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True @@ -8503,7 +8503,7 @@ def test_delete_trigger_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", allow_missing=True, ) mock_args.update(sample_request) @@ -8536,7 +8536,7 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_trigger( eventarc.DeleteTriggerRequest(), - name='name_value', + name="name_value", allow_missing=True, ) @@ -8594,14 +8594,14 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8669,7 +8669,7 @@ def test_get_channel_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -8703,7 +8703,7 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_channel( eventarc.GetChannelRequest(), - name='name_value', + name="name_value", ) @@ -8760,7 +8760,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -8769,7 +8769,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8837,7 +8837,7 @@ def test_list_channels_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -8871,7 +8871,7 @@ def test_list_channels_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_channels( eventarc.ListChannelsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9002,8 +9002,8 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelId"] = 'channel_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["channelId"] = "channel_id_value" jsonified_request["validateOnly"] = True unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) @@ -9013,9 +9013,9 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "channelId" in jsonified_request - assert jsonified_request["channelId"] == 'channel_id_value' + assert jsonified_request["channelId"] == "channel_id_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True @@ -9091,9 +9091,9 @@ def test_create_channel_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) mock_args.update(sample_request) @@ -9125,9 +9125,9 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_channel( eventarc.CreateChannelRequest(), - parent='parent_value', - channel=gce_channel.Channel(name='name_value'), - channel_id='channel_id_value', + parent="parent_value", + channel=gce_channel.Channel(name="name_value"), + channel_id="channel_id_value", ) @@ -9270,8 +9270,8 @@ def test_update_channel_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -9303,8 +9303,8 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_channel( eventarc.UpdateChannelRequest(), - channel=gce_channel.Channel(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + channel=gce_channel.Channel(name="name_value"), + 
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9369,7 +9369,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" jsonified_request["validateOnly"] = True unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) @@ -9379,7 +9379,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" assert "validateOnly" in jsonified_request assert jsonified_request["validateOnly"] == True @@ -9450,7 +9450,7 @@ def test_delete_channel_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9482,7 +9482,7 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_channel( eventarc.DeleteChannelRequest(), - name='name_value', + name="name_value", ) @@ -9539,14 +9539,14 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9614,7 +9614,7 @@ def test_get_provider_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -9648,7 +9648,7 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_provider( eventarc.GetProviderRequest(), - name='name_value', + name="name_value", ) @@ -9705,7 +9705,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -9714,7 +9714,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9782,7 +9782,7 @@ def test_list_providers_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -9816,7 +9816,7 @@ def test_list_providers_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_providers( eventarc.ListProvidersRequest(), - parent='parent_value', + parent="parent_value", ) @@ -9935,14 +9935,14 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10010,7 +10010,7 @@ def test_get_channel_connection_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10044,7 +10044,7 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_channel_connection( eventarc.GetChannelConnectionRequest(), - name='name_value', + name="name_value", ) @@ -10101,7 +10101,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -10110,7 +10110,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10178,7 +10178,7 @@ def test_list_channel_connections_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -10212,7 +10212,7 @@ def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_channel_connections( eventarc.ListChannelConnectionsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -10339,8 +10339,8 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr assert "channelConnectionId" in jsonified_request assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["channelConnectionId"] = 'channel_connection_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["channelConnectionId"] = "channel_connection_id_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. @@ -10349,9 +10349,9 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "channelConnectionId" in jsonified_request - assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' + assert jsonified_request["channelConnectionId"] == "channel_connection_id_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10421,9 +10421,9 @@ def test_create_channel_connection_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection(name="name_value"), + channel_connection_id="channel_connection_id_value", ) mock_args.update(sample_request) @@ -10455,9 +10455,9 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.create_channel_connection( eventarc.CreateChannelConnectionRequest(), - parent='parent_value', - channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), - channel_connection_id='channel_connection_id_value', + parent="parent_value", + channel_connection=gce_channel_connection.ChannelConnection(name="name_value"), + channel_connection_id="channel_connection_id_value", ) @@ -10518,14 +10518,14 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10590,7 +10590,7 @@ def test_delete_channel_connection_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10622,7 +10622,7 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.delete_channel_connection( eventarc.DeleteChannelConnectionRequest(), - name='name_value', + name="name_value", ) @@ -10679,14 +10679,14 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10754,7 +10754,7 @@ def test_get_google_channel_config_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -10788,7 +10788,7 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') with pytest.raises(ValueError): client.get_google_channel_config( eventarc.GetGoogleChannelConfigRequest(), - name='name_value', + name="name_value", ) @@ -10918,8 +10918,8 @@ def test_update_google_channel_config_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -10953,8 +10953,8 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res with pytest.raises(ValueError): client.update_google_channel_config( eventarc.UpdateGoogleChannelConfigRequest(), - google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -11504,11 +11504,11 @@ async def test_get_trigger_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", )) await client.get_trigger(request=None) @@ -11535,8 +11535,8 @@ async def test_list_triggers_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) await client.list_triggers(request=None) @@ -11644,12 +11644,12 @@ async def test_get_channel_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', + name="name_value", + uid="uid_value", + provider="provider_value", state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", )) await client.get_channel(request=None) @@ -11676,8 +11676,8 @@ async def test_list_channels_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) await client.list_channels(request=None) @@ -11785,8 +11785,8 @@ async def test_get_provider_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( - name='name_value', - display_name='display_name_value', + name="name_value", + display_name="display_name_value", )) await client.get_provider(request=None) @@ -11813,8 +11813,8 @@ async def test_list_providers_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) await client.list_providers(request=None) @@ -11841,10 +11841,10 @@ async def test_get_channel_connection_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", )) await client.get_channel_connection(request=None) @@ -11871,8 +11871,8 @@ async def test_list_channel_connections_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) await client.list_channel_connections(request=None) @@ -11953,8 +11953,8 @@ async def test_get_google_channel_config_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", )) await client.get_google_channel_config(request=None) @@ -11981,8 +11981,8 @@ async def test_update_google_channel_config_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", )) await client.update_google_channel_config(request=None) @@ -12041,11 +12041,11 @@ def test_get_trigger_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', + name="name_value", + uid="uid_value", + service_account="service_account_value", + channel="channel_value", + etag="etag_value", ) # Wrap the value into a proper Response obj @@ -12062,11 +12062,11 @@ def test_get_trigger_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.service_account == "service_account_value" + assert response.channel == "channel_value" + assert response.etag == "etag_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12155,8 +12155,8 @@ def test_list_triggers_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -12173,8 +12173,8 @@ def test_list_triggers_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12694,13 +12694,13 @@ def test_get_channel_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', + name="name_value", + uid="uid_value", + provider="provider_value", state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', + activation_token="activation_token_value", + crypto_key_name="crypto_key_name_value", + pubsub_topic="pubsub_topic_value", ) # Wrap the value into a proper Response obj @@ -12717,12 +12717,12 @@ def test_get_channel_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.provider == "provider_value" assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.activation_token == "activation_token_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -12811,8 +12811,8 @@ def test_list_channels_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -12829,8 +12829,8 @@ def test_list_channels_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13350,8 +13350,8 @@ def test_get_provider_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', + name="name_value", + display_name="display_name_value", ) # Wrap the value into a proper Response obj @@ -13368,8 +13368,8 @@ def test_get_provider_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13458,8 +13458,8 @@ def test_list_providers_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -13476,8 +13476,8 @@ def test_list_providers_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListProvidersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13566,10 +13566,10 @@ def test_get_channel_connection_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', + name="name_value", + uid="uid_value", + channel="channel_value", + activation_token="activation_token_value", ) # Wrap the value into a proper Response obj @@ -13586,10 +13586,10 @@ def test_get_channel_connection_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.channel == "channel_value" + assert response.activation_token == "activation_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -13678,8 +13678,8 @@ def test_list_channel_connections_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -13696,8 +13696,8 @@ def test_list_channel_connections_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14052,8 +14052,8 @@ def test_get_google_channel_config_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) # Wrap the value into a proper Response obj @@ -14070,8 +14070,8 @@ def test_get_google_channel_config_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -14224,8 +14224,8 @@ def get_message_fields(field): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', + name="name_value", + crypto_key_name="crypto_key_name_value", ) # Wrap the value into a proper Response obj @@ -14242,8 +14242,8 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' + assert response.name == "name_value" + assert response.crypto_key_name == "crypto_key_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 0e30784fd9..8f9413c0c7 100755 --- a/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -102,86 +102,87 @@ from google.cloud.logging_v2.types.logging_metrics import LogMetric from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest -__all__ = ('ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', +__all__ = ( + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient", + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + 
"CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 242a438825..125b4bed12 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -112,10 +112,10 @@ from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: @@ -125,20 +125,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.cloud.logging_v2" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. 
Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -176,107 +180,111 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'ConfigServiceV2AsyncClient', - 'LoggingServiceV2AsyncClient', - 'MetricsServiceV2AsyncClient', -'BigQueryDataset', -'BigQueryOptions', -'BucketMetadata', -'CmekSettings', -'ConfigServiceV2Client', -'CopyLogEntriesMetadata', -'CopyLogEntriesRequest', -'CopyLogEntriesResponse', -'CreateBucketRequest', -'CreateExclusionRequest', -'CreateLinkRequest', -'CreateLogMetricRequest', -'CreateSinkRequest', -'CreateViewRequest', -'DeleteBucketRequest', -'DeleteExclusionRequest', -'DeleteLinkRequest', -'DeleteLogMetricRequest', -'DeleteLogRequest', -'DeleteSinkRequest', -'DeleteViewRequest', -'GetBucketRequest', -'GetCmekSettingsRequest', -'GetExclusionRequest', -'GetLinkRequest', -'GetLogMetricRequest', -'GetSettingsRequest', -'GetSinkRequest', -'GetViewRequest', -'IndexConfig', -'IndexType', -'LifecycleState', -'Link', -'LinkMetadata', -'ListBucketsRequest', -'ListBucketsResponse', -'ListExclusionsRequest', -'ListExclusionsResponse', -'ListLinksRequest', -'ListLinksResponse', -'ListLogEntriesRequest', -'ListLogEntriesResponse', -'ListLogMetricsRequest', -'ListLogMetricsResponse', -'ListLogsRequest', -'ListLogsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListSinksRequest', -'ListSinksResponse', -'ListViewsRequest', -'ListViewsResponse', -'LocationMetadata', -'LogBucket', -'LogEntry', -'LogEntryOperation', -'LogEntrySourceLocation', -'LogExclusion', -'LogMetric', -'LogSink', -'LogSplit', -'LogView', -'LoggingServiceV2Client', -'MetricsServiceV2Client', -'OperationState', -'Settings', -'TailLogEntriesRequest', -'TailLogEntriesResponse', -'UndeleteBucketRequest', -'UpdateBucketRequest', -'UpdateCmekSettingsRequest', -'UpdateExclusionRequest', -'UpdateLogMetricRequest', -'UpdateSettingsRequest', -'UpdateSinkRequest', -'UpdateViewRequest', -'WriteLogEntriesPartialErrors', -'WriteLogEntriesRequest', -'WriteLogEntriesResponse', + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "ConfigServiceV2AsyncClient", + "ConfigServiceV2Client", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateLogMetricRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteLogMetricRequest", + "DeleteLogRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetLogMetricRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "IndexType", + "LifecycleState", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogExclusion", + "LoggingServiceV2AsyncClient", + 
"LoggingServiceV2Client", + "LogMetric", + "LogSink", + "LogSplit", + "LogView", + "MetricsServiceV2AsyncClient", + "MetricsServiceV2Client", + "OperationState", + "Settings", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateLogMetricRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 7c1b69fb60..187d00d52b 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import ConfigServiceV2AsyncClient __all__ = ( - 'ConfigServiceV2Client', - 'ConfigServiceV2AsyncClient', + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index b5d7d26514..0231fa0eaa 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,7 +49,7 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -48,12 +59,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -188,12 +201,14 @@ def universe_domain(self) -> str: get_transport_class = ConfigServiceV2Client.get_transport_class - def __init__(self, *, - credentials: 
Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the config service v2 async client. Args: @@ -248,31 +263,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - async def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsAsyncPager: + async def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. .. code-block:: python @@ -346,8 +363,7 @@ async def sample_list_buckets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -365,11 +381,13 @@ async def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -396,13 +414,14 @@ async def sample_list_buckets(): # Done; return the response. 
return response - async def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -460,11 +479,13 @@ async def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -480,13 +501,14 @@ async def sample_get_bucket(): # Done; return the response. return response - async def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -555,11 +577,13 @@ async def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -583,13 +607,14 @@ async def sample_create_bucket_async(): # Done; return the response. return response - async def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -660,11 +685,13 @@ async def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -688,13 +715,14 @@ async def sample_update_bucket_async(): # Done; return the response. return response - async def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -755,11 +783,13 @@ async def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -775,13 +805,14 @@ async def sample_create_bucket(): # Done; return the response. return response - async def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -845,11 +876,13 @@ async def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -865,13 +898,14 @@ async def sample_update_bucket(): # Done; return the response. return response - async def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -925,11 +959,13 @@ async def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -942,13 +978,14 @@ async def sample_delete_bucket(): metadata=metadata, ) - async def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -999,11 +1036,13 @@ async def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1016,14 +1055,15 @@ async def sample_undelete_bucket(): metadata=metadata, ) - async def list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsAsyncPager: + async def list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1089,8 +1129,7 @@ async def sample_list_views(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1108,11 +1147,13 @@ async def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1139,13 +1180,14 @@ async def sample_list_views(): # Done; return the response. 
return response - async def get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1203,11 +1245,13 @@ async def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1223,13 +1267,14 @@ async def sample_get_view(): # Done; return the response. return response - async def create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1289,11 +1334,13 @@ async def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1309,13 +1356,14 @@ async def sample_create_view(): # Done; return the response. return response - async def update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1377,11 +1425,13 @@ async def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1397,13 +1447,14 @@ async def sample_update_view(): # Done; return the response. 
return response - async def delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1455,11 +1506,13 @@ async def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1472,14 +1525,15 @@ async def sample_delete_view(): metadata=metadata, ) - async def list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksAsyncPager: + async def list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. .. code-block:: python @@ -1548,8 +1602,7 @@ async def sample_list_sinks(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1567,11 +1620,13 @@ async def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1598,14 +1653,15 @@ async def sample_list_sinks(): # Done; return the response. 
return response - async def get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -1681,8 +1737,7 @@ async def sample_get_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1700,11 +1755,13 @@ async def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1720,15 +1777,16 @@ async def sample_get_sink(): # Done; return the response. return response - async def create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -1820,8 +1878,7 @@ async def sample_create_sink(): flattened_params = [parent, sink] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1841,11 +1898,13 @@ async def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1861,16 +1920,17 @@ async def sample_create_sink(): # Done; return the response. return response - async def update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -1986,8 +2046,7 @@ async def sample_update_sink(): flattened_params = [sink_name, sink, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2009,11 +2068,13 @@ async def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2029,14 +2090,15 @@ async def sample_update_sink(): # Done; return the response. return response - async def delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
@@ -2098,8 +2160,7 @@ async def sample_delete_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2117,11 +2178,13 @@ async def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2134,16 +2197,17 @@ async def sample_delete_sink(): metadata=metadata, ) - async def create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2233,8 +2297,7 @@ async def sample_create_link(): flattened_params = [parent, link, link_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2256,11 +2319,13 @@ async def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2284,14 +2349,15 @@ async def sample_create_link(): # Done; return the response. 
return response - async def delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2369,8 +2435,7 @@ async def sample_delete_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2388,11 +2453,13 @@ async def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2416,14 +2483,15 @@ async def sample_delete_link(): # Done; return the response. return response - async def list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksAsyncPager: + async def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksAsyncPager: r"""Lists links. .. code-block:: python @@ -2491,8 +2559,7 @@ async def sample_list_links(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2510,11 +2577,13 @@ async def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -2541,14 +2610,15 @@ async def sample_list_links(): # Done; return the response. return response - async def get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + async def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -2611,8 +2681,7 @@ async def sample_get_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2630,11 +2699,13 @@ async def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2650,14 +2721,15 @@ async def sample_get_link(): # Done; return the response. return response - async def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsAsyncPager: + async def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2727,8 +2799,7 @@ async def sample_list_exclusions(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2746,11 +2817,13 @@ async def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -2777,14 +2850,15 @@ async def sample_list_exclusions(): # Done; return the response. return response - async def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2858,8 +2932,7 @@ async def sample_get_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2877,11 +2950,13 @@ async def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2897,15 +2972,16 @@ async def sample_get_exclusion(): # Done; return the response. return response - async def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. 
@@ -2996,8 +3072,7 @@ async def sample_create_exclusion(): flattened_params = [parent, exclusion] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3017,11 +3092,13 @@ async def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3037,16 +3114,17 @@ async def sample_create_exclusion(): # Done; return the response. return response - async def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3148,8 +3226,7 @@ async def sample_update_exclusion(): flattened_params = [name, exclusion, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3171,11 +3248,13 @@ async def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3191,14 +3270,15 @@ async def sample_update_exclusion(): # Done; return the response. 
return response - async def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. code-block:: python @@ -3259,8 +3339,7 @@ async def sample_delete_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3278,11 +3357,13 @@ async def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3295,13 +3376,14 @@ async def sample_delete_exclusion(): metadata=metadata, ) - async def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3383,11 +3465,13 @@ async def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3403,13 +3487,14 @@ async def sample_get_cmek_settings(): # Done; return the response. 
return response - async def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3496,11 +3581,13 @@ async def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3516,14 +3603,15 @@ async def sample_update_cmek_settings(): # Done; return the response. return response - async def get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -3615,8 +3703,7 @@ async def sample_get_settings(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3634,11 +3721,13 @@ async def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3654,15 +3743,16 @@ async def sample_get_settings(): # Done; return the response. 
return response - async def update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -3761,8 +3851,7 @@ async def sample_update_settings(): flattened_params = [settings, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3782,11 +3871,13 @@ async def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3802,13 +3893,14 @@ async def sample_update_settings(): # Done; return the response. return response - async def copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3933,17 +4025,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3985,17 +4078,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4040,16 +4134,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self @@ -4057,12 +4153,11 @@ async def __aenter__(self) -> "ConfigServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "ConfigServiceV2AsyncClient", -) +__all__ = ("ConfigServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index b2e701b095..c8f6c95975 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + 
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -52,7 +65,7 @@
 from google.api_core import operation_async  # type: ignore
 from google.cloud.logging_v2.services.config_service_v2 import pagers
 from google.cloud.logging_v2.types import logging_config
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
@@ -68,13 +81,15 @@ class ConfigServiceV2ClientMeta(type):
     support objects (e.g. transport) without polluting the client instance
     objects.
     """
+
     _transport_registry = OrderedDict()  # type: Dict[str, Type[ConfigServiceV2Transport]]
     _transport_registry["grpc"] = ConfigServiceV2GrpcTransport
     _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport
 
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[ConfigServiceV2Transport]:
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[ConfigServiceV2Transport]:
         """Returns an appropriate transport class.
 
         Args:
@@ -110,9 +125,7 @@ def _get_default_mtls_endpoint(api_endpoint):
         if not api_endpoint:
             return api_endpoint
 
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
+        mtls_endpoint_re = re.compile(r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?")
         m = mtls_endpoint_re.match(api_endpoint)
         name, mtls, sandbox, googledomain = m.groups()
 
@@ -121,16 +134,15 @@ def _get_default_mtls_endpoint(api_endpoint):
 
         if sandbox:
             return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+                "sandbox.googleapis.com",
+                "mtls.sandbox.googleapis.com",
             )
         return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
 
     # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
     DEFAULT_ENDPOINT = "logging.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT)  # type: ignore
 
     _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}"
     _DEFAULT_UNIVERSE = "googleapis.com"
@@ -144,21 +156,19 @@ def _use_client_cert_effective():
 
         Returns:
             bool: whether client certificate should be used for mTLS
+
         Raises:
-            ValueError: (If using a version of google-auth without should_use_client_cert and
-                GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.)
+            ValueError: If using a version of google-auth without should_use_client_cert
+                and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.
         """
         # check if google-auth version supports should_use_client_cert for automatic mTLS enablement
         if hasattr(mtls, "should_use_client_cert"):  # pragma: NO COVER
             return mtls.should_use_client_cert()
-        else: # pragma: NO COVER
+        else:  # pragma: NO COVER
             # if unsupported, fallback to reading from env var
             use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
             if use_client_cert_str not in ("true", "false"):
-                raise ValueError(
-                    "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
-                    " either `true` or `false`"
-                )
+                raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
             return use_client_cert_str == "true"
 
     @classmethod
@@ -193,7 +203,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
             ConfigServiceV2Client: The constructed client.
         """
         credentials = service_account.Credentials.from_service_account_file(
-            filename)
+            filename,
+        )
         kwargs["credentials"] = credentials
         return cls(*args, **kwargs)
 
@@ -210,139 +221,236 @@ def transport(self) -> ConfigServiceV2Transport:
         return self._transport
 
     @staticmethod
-    def cmek_settings_path(project: str,) -> str:
+    def cmek_settings_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified cmek_settings string."""
-        return "projects/{project}/cmekSettings".format(project=project, )
+        return "projects/{project}/cmekSettings".format(
+            project=project,
+        )
 
     @staticmethod
-    def parse_cmek_settings_path(path: str) -> Dict[str,str]:
+    def parse_cmek_settings_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a cmek_settings path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/cmekSettings$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def link_path(project: str,location: str,bucket: str,link: str,) -> str:
+    def link_path(
+        project: str,
+        location: str,
+        bucket: str,
+        link: str,
+    ) -> str:
         """Returns a fully-qualified link string."""
-        return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, )
+        return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(
+            project=project,
+            location=location,
+            bucket=bucket,
+            link=link,
+        )
 
     @staticmethod
-    def parse_link_path(path: str) -> Dict[str,str]:
+    def parse_link_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a link path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)/links/(?P<link>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def log_bucket_path(project: str,location: str,bucket: str,) -> str:
+    def log_bucket_path(
+        project: str,
+        location: str,
+        bucket: str,
+    ) -> str:
         """Returns a fully-qualified log_bucket string."""
-        return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, )
+        return "projects/{project}/locations/{location}/buckets/{bucket}".format(
+            project=project,
+            location=location,
+            bucket=bucket,
+        )
 
     @staticmethod
-    def parse_log_bucket_path(path: str) -> Dict[str,str]:
+    def parse_log_bucket_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a log_bucket path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def log_exclusion_path(project: str,exclusion: str,) -> str:
+    def log_exclusion_path(
+        project: str,
+        exclusion: str,
+    ) -> str:
         """Returns a fully-qualified log_exclusion string."""
-        return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, )
+        return "projects/{project}/exclusions/{exclusion}".format(
+            project=project,
+            exclusion=exclusion,
+        )
 
     @staticmethod
-    def parse_log_exclusion_path(path: str) -> Dict[str,str]:
+    def parse_log_exclusion_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a log_exclusion path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/exclusions/(?P<exclusion>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def log_sink_path(project: str,sink: str,) -> str:
+    def log_sink_path(
+        project: str,
+        sink: str,
+    ) -> str:
         """Returns a fully-qualified log_sink string."""
-        return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, )
+        return "projects/{project}/sinks/{sink}".format(
+            project=project,
+            sink=sink,
+        )
 
     @staticmethod
-    def parse_log_sink_path(path: str) -> Dict[str,str]:
+    def parse_log_sink_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a log_sink path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def log_view_path(project: str,location: str,bucket: str,view: str,) -> str:
+    def log_view_path(
+        project: str,
+        location: str,
+        bucket: str,
+        view: str,
+    ) -> str:
         """Returns a fully-qualified log_view string."""
-        return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, )
+        return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(
+            project=project,
+            location=location,
+            bucket=bucket,
+            view=view,
+        )
 
     @staticmethod
-    def parse_log_view_path(path: str) -> Dict[str,str]:
+    def parse_log_view_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a log_view path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)/views/(?P<view>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def settings_path(project: str,) -> str:
+    def settings_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified settings string."""
-        return "projects/{project}/settings".format(project=project, )
+        return "projects/{project}/settings".format(
+            project=project,
+        )
 
     @staticmethod
-    def parse_settings_path(path: str) -> Dict[str,str]:
+    def parse_settings_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parses a settings path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/settings$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
         """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
 
     @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+    def parse_common_billing_account_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parse a billing_account path into its component segments."""
         m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_folder_path(folder: str, ) -> str:
+    def common_folder_path(
+        folder: str,
+    ) -> str:
         """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
+        return "folders/{folder}".format(
+            folder=folder,
+        )
 
     @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
+    def parse_common_folder_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parse a folder path into its component segments."""
         m = re.match(r"^folders/(?P<folder>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_organization_path(organization: str, ) -> str:
+    def common_organization_path(
+        organization: str,
+    ) -> str:
         """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
 
     @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
+    def parse_common_organization_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parse a organization path into its component segments."""
         m = re.match(r"^organizations/(?P<organization>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_project_path(project: str, ) -> str:
+    def common_project_path(
+        project: str,
+    ) -> str:
         """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
+        return "projects/{project}".format(
+            project=project,
+        )
 
     @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
+    def parse_common_project_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parse a project path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)$", path)
         return m.groupdict() if m else {}
 
     @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
         """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
 
     @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
+    def parse_common_location_path(
+        path: str,
+    ) -> Dict[str, str]:
         """Parse a location path into its component segments."""
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
     @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+    def get_mtls_endpoint_and_cert_source(
+        cls,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+    ):
         """Deprecated. Return the API endpoint and client cert source for mutual TLS.
 
         The client cert source is determined in the following order:
@@ -374,8 +482,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_optio
             google.auth.exceptions.MutualTLSChannelError: If any errors happen.
         """
 
-        warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
-            DeprecationWarning)
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
         if client_options is None:
             client_options = client_options_lib.ClientOptions()
         use_client_cert = ConfigServiceV2Client._use_client_cert_effective()
@@ -505,7 +615,7 @@ def _validate_universe_domain(self):
 
     def _add_cred_info_for_auth_errors(
         self,
-        error: core_exceptions.GoogleAPICallError
+        error: core_exceptions.GoogleAPICallError,
     ) -> None:
         """Adds credential info string to error details for 401/403/404 errors.
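# A minimal, self-contained sketch (not part of the diff above) of the
# resource-path helper pattern shown in this hunk: each `*_path` builder
# pairs with a `parse_*_path` regex whose named groups mirror the template
# placeholders. Values below are illustrative.
import re
from typing import Dict

def log_sink_path(project: str, sink: str) -> str:
    # Mirrors ConfigServiceV2Client.log_sink_path above.
    return "projects/{project}/sinks/{sink}".format(project=project, sink=sink)

def parse_log_sink_path(path: str) -> Dict[str, str]:
    # Mirrors ConfigServiceV2Client.parse_log_sink_path above.
    m = re.match(r"^projects/(?P<project>.+?)/sinks/(?P<sink>.+?)$", path)
    return m.groupdict() if m else {}

path = log_sink_path("my-project", "my-sink")
assert path == "projects/my-project/sinks/my-sink"
assert parse_log_sink_path(path) == {"project": "my-project", "sink": "my-sink"}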
@@ -545,12 +655,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the config service v2 client. Args: @@ -607,12 +719,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ConfigServiceV2Client._read_environment_variables() self._client_cert_source = ConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = ConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -632,22 +744,22 @@ def __init__(self, *, if transport_provided: # transport is a ConfigServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
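# Hedged construction sketch for the __init__ signature above; assumes
# google-cloud-logging is installed and Application Default Credentials are
# configured. The transport label and universe_domain shown are examples,
# not defaults baked into the generated client.
from google.api_core.client_options import ClientOptions
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

client = ConfigServiceV2Client(
    transport="grpc",  # a registry label, a transport instance, or a callable
    client_options=ClientOptions(universe_domain="googleapis.com"),
)
print(client.universe_domain)  # -> "googleapis.com"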
+ "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(ConfigServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - ConfigServiceV2Client._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or ConfigServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -677,25 +789,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.ConfigServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsPager: + def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsPager: r"""Lists log buckets. .. code-block:: python @@ -769,8 +884,7 @@ def sample_list_buckets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -787,11 +901,13 @@ def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -818,13 +934,14 @@ def sample_list_buckets(): # Done; return the response. 
return response - def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -882,11 +999,13 @@ def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -902,13 +1021,14 @@ def sample_get_bucket(): # Done; return the response. return response - def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -977,11 +1097,13 @@ def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1005,13 +1127,14 @@ def sample_create_bucket_async(): # Done; return the response. return response - def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1082,11 +1205,13 @@ def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1110,13 +1235,14 @@ def sample_update_bucket_async(): # Done; return the response. 
return response - def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -1177,11 +1303,13 @@ def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1197,13 +1325,14 @@ def sample_create_bucket(): # Done; return the response. return response - def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1267,11 +1396,13 @@ def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1287,13 +1418,14 @@ def sample_update_bucket(): # Done; return the response. return response - def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -1347,11 +1479,13 @@ def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._validate_universe_domain() @@ -1364,13 +1498,14 @@ def sample_delete_bucket(): metadata=metadata, ) - def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1421,11 +1556,13 @@ def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1438,14 +1575,15 @@ def sample_undelete_bucket(): metadata=metadata, ) - def list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsPager: + def list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1511,8 +1649,7 @@ def sample_list_views(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1529,11 +1666,13 @@ def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1560,13 +1699,14 @@ def sample_list_views(): # Done; return the response. 
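# Pager sketch for list_views above: the returned ListViewsPager fetches
# additional pages transparently while iterating. The parent is a placeholder.
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
parent = "projects/my-project/locations/global/buckets/my-bucket"
for view in client.list_views(parent=parent):
    print(view.name)  # each item is a logging_config.LogView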
return response - def get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1624,11 +1764,13 @@ def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1644,13 +1786,14 @@ def sample_get_view(): # Done; return the response. return response - def create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1710,11 +1853,13 @@ def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1730,13 +1875,14 @@ def sample_create_view(): # Done; return the response. return response - def update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1798,11 +1944,13 @@ def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1818,13 +1966,14 @@ def sample_update_view(): # Done; return the response. 
return response - def delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1876,11 +2025,13 @@ def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1893,14 +2044,15 @@ def sample_delete_view(): metadata=metadata, ) - def list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksPager: + def list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksPager: r"""Lists sinks. .. code-block:: python @@ -1969,8 +2121,7 @@ def sample_list_sinks(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1987,11 +2138,13 @@ def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2018,14 +2171,15 @@ def sample_list_sinks(): # Done; return the response. 
return response - def get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -2101,8 +2255,7 @@ def sample_get_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2119,11 +2272,13 @@ def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2139,15 +2294,16 @@ def sample_get_sink(): # Done; return the response. return response - def create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -2239,8 +2395,7 @@ def sample_create_sink(): flattened_params = [parent, sink] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2259,11 +2414,13 @@ def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2279,16 +2436,17 @@ def sample_create_sink(): # Done; return the response. return response - def update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -2404,8 +2562,7 @@ def sample_update_sink(): flattened_params = [sink_name, sink, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2426,11 +2583,13 @@ def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2446,14 +2605,15 @@ def sample_update_sink(): # Done; return the response. return response - def delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
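# Flattened-argument sketch for update_sink above: sink_name, sink, and
# update_mask may be passed directly instead of a request object. The sink
# name is a placeholder.
from google.cloud import logging_v2
from google.protobuf import field_mask_pb2

client = logging_v2.ConfigServiceV2Client()
sink = client.update_sink(
    sink_name="projects/my-project/sinks/my-sink",
    sink=logging_v2.LogSink(filter="severity>=ERROR"),
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
)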
@@ -2515,8 +2675,7 @@ def sample_delete_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2533,11 +2692,13 @@ def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2550,16 +2711,17 @@ def sample_delete_sink(): metadata=metadata, ) - def create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2649,8 +2811,7 @@ def sample_create_link(): flattened_params = [parent, link, link_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2671,11 +2832,13 @@ def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2699,14 +2862,15 @@ def sample_create_link(): # Done; return the response. 
return response - def delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2784,8 +2948,7 @@ def sample_delete_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2802,11 +2965,13 @@ def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2830,14 +2995,15 @@ def sample_delete_link(): # Done; return the response. return response - def list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksPager: + def list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksPager: r"""Lists links. .. code-block:: python @@ -2905,8 +3071,7 @@ def sample_list_links(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2923,11 +3088,13 @@ def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2954,14 +3121,15 @@ def sample_list_links(): # Done; return the response. 
return response - def get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + def get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -3024,8 +3192,7 @@ def sample_get_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3042,11 +3209,13 @@ def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3062,14 +3231,15 @@ def sample_get_link(): # Done; return the response. return response - def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsPager: + def list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3139,8 +3309,7 @@ def sample_list_exclusions(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3157,11 +3326,13 @@ def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3188,14 +3359,15 @@ def sample_list_exclusions(): # Done; return the response. 
return response - def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -3269,8 +3441,7 @@ def sample_get_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3287,11 +3458,13 @@ def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3307,15 +3480,16 @@ def sample_get_exclusion(): # Done; return the response. return response - def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3406,8 +3580,7 @@ def sample_create_exclusion(): flattened_params = [parent, exclusion] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3426,11 +3599,13 @@ def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. 
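# Sketch for create_exclusion above; the LogExclusion filter selects the
# entries the _Default sink should drop. Names and filter are placeholders.
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
exclusion = client.create_exclusion(
    parent="projects/my-project",
    exclusion=logging_v2.LogExclusion(name="no-debug", filter="severity<INFO"),
)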
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3446,16 +3621,17 @@ def sample_create_exclusion(): # Done; return the response. return response - def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3557,8 +3733,7 @@ def sample_update_exclusion(): flattened_params = [name, exclusion, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3579,11 +3754,13 @@ def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3599,14 +3776,15 @@ def sample_update_exclusion(): # Done; return the response. return response - def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. 
code-block:: python @@ -3667,8 +3845,7 @@ def sample_delete_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3685,11 +3862,13 @@ def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3702,13 +3881,14 @@ def sample_delete_exclusion(): metadata=metadata, ) - def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3790,11 +3970,13 @@ def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3810,13 +3992,14 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3903,11 +4086,13 @@ def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3923,14 +4108,15 @@ def sample_update_cmek_settings(): # Done; return the response. 
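# CMEK sketch for the request-only get_cmek_settings method above; the
# resource name is a placeholder and kms_key_name is assumed here purely
# for illustration of reading the returned CmekSettings message.
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
settings = client.get_cmek_settings(
    request={"name": "projects/my-project/cmekSettings"}
)
print(settings.kms_key_name)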
return response - def get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -4022,8 +4208,7 @@ def sample_get_settings(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4040,11 +4225,13 @@ def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -4060,15 +4247,16 @@ def sample_get_settings(): # Done; return the response. return response - def update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -4167,8 +4355,7 @@ def sample_update_settings(): flattened_params = [settings, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4187,11 +4374,13 @@ def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. 
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("name", request.name),
             )),
         )
+        # fmt: on
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -4207,13 +4396,14 @@ def sample_update_settings():
 
         # Done; return the response.
         return response
 
-    def copy_log_entries(self,
-            request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation.Operation:
+    def copy_log_entries(
+        self,
+        request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operation.Operation:
         r"""Copies a set of log entries from a log bucket to a
         Cloud Storage bucket.
 
@@ -4351,10 +4541,7 @@ def list_operations(
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -4362,7 +4549,11 @@ def list_operations(
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -4407,10 +4598,7 @@ def get_operation(
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
@@ -4418,7 +4606,11 @@ def get_operation(
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
 
             # Done; return the response.
             return response
@@ -4466,21 +4658,18 @@ def cancel_operation(
 
         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)
 
         # Validate the universe domain.
         self._validate_universe_domain()
 
         # Send the request.
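The operations-mixin hunks below only reflow the `rpc(...)` call onto one argument per line. For context, a hedged sketch of how such an `rpc` callable is typically produced with `google.api_core` (the transport callable and retry settings here are illustrative, not taken from this diff):

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    # wrap_method layers default retry/timeout and client-info metadata onto
    # a bare transport callable; callers then invoke it as
    # rpc(request, retry=..., timeout=..., metadata=...).
    rpc = gapic_v1.method.wrap_method(
        transport.list_operations,  # hypothetical transport callable
        default_retry=retries.Retry(initial=0.1, maximum=60.0, multiplier=1.3),
        default_timeout=60.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )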
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-
-
-
-
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
 
@@ -4488,6 +4677,4 @@ def cancel_operation(
 if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
     DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
 
-__all__ = (
-    "ConfigServiceV2Client",
-)
+__all__ = ("ConfigServiceV2Client",)
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py
index 1af6b54c99..6ef08a181b 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py
@@ -17,6 +17,7 @@
 from google.api_core import retry as retries
 from google.api_core import retry_async as retries_async
 from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+
 try:
     OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
     OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
@@ -44,14 +45,17 @@ class ListBucketsPager:
     attributes are available on the pager. If multiple requests are made, only
     the most recent response is retained, and thus used for attribute lookup.
     """
-    def __init__(self,
-            method: Callable[..., logging_config.ListBucketsResponse],
-            request: logging_config.ListBucketsRequest,
-            response: logging_config.ListBucketsResponse,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
+
+    def __init__(
+        self,
+        method: Callable[..., logging_config.ListBucketsResponse],
+        request: logging_config.ListBucketsRequest,
+        response: logging_config.ListBucketsResponse,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+    ):
         """Instantiate the pager.
 
         Args:
@@ -92,7 +96,7 @@ def __iter__(self) -> Iterator[logging_config.LogBucket]:
             yield from page.buckets
 
     def __repr__(self) -> str:
-        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
 
 
 class ListBucketsAsyncPager:
@@ -112,14 +116,17 @@ class ListBucketsAsyncPager:
     attributes are available on the pager. If multiple requests are made, only
     the most recent response is retained, and thus used for attribute lookup.
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: @@ -163,7 +171,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsPager: @@ -183,14 +191,17 @@ class ListViewsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListViewsResponse], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -231,7 +242,7 @@ def __iter__(self) -> Iterator[logging_config.LogView]: yield from page.views def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsAsyncPager: @@ -251,14 +262,17 @@ class ListViewsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListViewsResponse]], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -293,6 +307,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: @@ -302,7 +317,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksPager: @@ -322,14 +337,17 @@ class ListSinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListSinksResponse], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -370,7 +388,7 @@ def __iter__(self) -> Iterator[logging_config.LogSink]: yield from page.sinks def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksAsyncPager: @@ -390,14 +408,17 @@ class ListSinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListSinksResponse]], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -432,6 +453,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: @@ -441,7 +463,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksPager: @@ -461,14 +483,17 @@ class ListLinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListLinksResponse], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -509,7 +534,7 @@ def __iter__(self) -> Iterator[logging_config.Link]: yield from page.links def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksAsyncPager: @@ -529,14 +554,17 @@ class ListLinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListLinksResponse]], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -571,6 +599,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.Link]: async def async_generator(): async for page in self.pages: @@ -580,7 +609,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsPager: @@ -600,14 +629,17 @@ class ListExclusionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListExclusionsResponse], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -648,7 +680,7 @@ def __iter__(self) -> Iterator[logging_config.LogExclusion]: yield from page.exclusions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsAsyncPager: @@ -668,14 +700,17 @@ class ListExclusionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -710,6 +745,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: @@ -719,4 +755,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index cc3da21c11..6f8979ef81 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. 
 _transport_registry = OrderedDict()  # type: Dict[str, Type[ConfigServiceV2Transport]]
-_transport_registry['grpc'] = ConfigServiceV2GrpcTransport
-_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport
+_transport_registry["grpc"] = ConfigServiceV2GrpcTransport
+_transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport
 
 __all__ = (
-    'ConfigServiceV2Transport',
-    'ConfigServiceV2GrpcTransport',
-    'ConfigServiceV2GrpcAsyncIOTransport',
+    "ConfigServiceV2Transport",
+    "ConfigServiceV2GrpcTransport",
+    "ConfigServiceV2GrpcAsyncIOTransport",
 )
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py
index 625a388594..1ea14ce3a1 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py
@@ -25,11 +25,11 @@
 from google.api_core import retry as retries
 from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account # type: ignore
+from google.oauth2 import service_account  # type: ignore
 import google.protobuf
 
 from google.cloud.logging_v2.types import logging_config
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
@@ -41,32 +41,35 @@ class ConfigServiceV2Transport(abc.ABC):
     """Abstract transport class for ConfigServiceV2."""
 
+    # fmt: off
     AUTH_SCOPES = (
-        'https://www.googleapis.com/auth/cloud-platform',
-        'https://www.googleapis.com/auth/cloud-platform.read-only',
-        'https://www.googleapis.com/auth/logging.admin',
-        'https://www.googleapis.com/auth/logging.read',
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/logging.read",
     )
+    # fmt: on
 
-    DEFAULT_HOST: str = 'logging.googleapis.com'
+    DEFAULT_HOST: str = "logging.googleapis.com"
 
     def __init__(
-            self, *,
-            host: str = DEFAULT_HOST,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            **kwargs,
-            ) -> None:
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
         """Instantiate the transport.
 
         Args:
             host (Optional[str]):
-                The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests.
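The base-transport hunks that follow reshape credential loading and host normalization without changing behavior. Condensed into one helper, the logic is roughly the following (a sketch under the same assumptions the generated code makes, with AUTH_SCOPES as defined above):

    import google.auth

    def _resolve(host, credentials=None, credentials_file=None, scopes=None, quota_project_id=None):
        scopes_kwargs = {"scopes": scopes, "default_scopes": AUTH_SCOPES}
        if credentials_file is not None:
            # An explicit credentials file wins over ambient credentials.
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id,
            )
        elif credentials is None:
            # Fall back to Application Default Credentials.
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
        # Default to HTTPS port 443 when the caller gave a bare hostname.
        if ":" not in host:
            host += ":443"
        return credentials, host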
                 These credentials identify the application to the service; if none
@@ -102,10 +105,10 @@ def __init__(
 
         if credentials_file is not None:
             credentials, _ = google.auth.load_credentials_from_file(
-                credentials_file,
-                **scopes_kwargs,
-                quota_project_id=quota_project_id
-            )
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id,
+            )
         elif credentials is None and not self._ignore_credentials:
             credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
             # Don't apply audience if the credentials file passed from user.
@@ -113,15 +116,19 @@ def __init__(
                 credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
 
         # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
             credentials = credentials.with_always_use_jwt_access(True)
 
         # Save the credentials.
         self._credentials = credentials
 
         # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
+        if ":" not in host:
+            host += ":443"
         self._host = host
 
     @property
@@ -383,12 +390,12 @@ def _prep_wrapped_messages(self, client_info):
                 default_timeout=None,
                 client_info=client_info,
             ),
-         }
+        }
 
     def close(self):
         """Closes resources associated with the transport.
 
-       .. warning::
+        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
         """
@@ -399,293 +406,453 @@ def operations_client(self):
         """Return the client designed to process long-running operations."""
         raise NotImplementedError()
 
+    # fmt: off
     @property
-    def list_buckets(self) -> Callable[
-            [logging_config.ListBucketsRequest],
-            Union[
-                logging_config.ListBucketsResponse,
-                Awaitable[logging_config.ListBucketsResponse]
-            ]]:
+    def list_buckets(
+        self,
+    ) -> Callable[
+        [logging_config.ListBucketsRequest],
+        Union[
+            logging_config.ListBucketsResponse,
+            Awaitable[logging_config.ListBucketsResponse]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_bucket(self) -> Callable[
-            [logging_config.GetBucketRequest],
-            Union[
-                logging_config.LogBucket,
-                Awaitable[logging_config.LogBucket]
-            ]]:
+    def get_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.GetBucketRequest],
+        Union[
+            logging_config.LogBucket,
+            Awaitable[logging_config.LogBucket]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_bucket_async(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            Union[
-                operations_pb2.Operation,
-                Awaitable[operations_pb2.Operation]
-            ]]:
+    def create_bucket_async(
+        self,
+    ) -> Callable[
+        [logging_config.CreateBucketRequest],
+        Union[
+            operations_pb2.Operation,
+            Awaitable[operations_pb2.Operation]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_bucket_async(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            Union[
-                operations_pb2.Operation,
-                Awaitable[operations_pb2.Operation]
-            ]]:
+    def update_bucket_async(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateBucketRequest],
+        Union[
+            operations_pb2.Operation,
+            Awaitable[operations_pb2.Operation]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_bucket(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            Union[
-                logging_config.LogBucket,
-                Awaitable[logging_config.LogBucket]
-            ]]:
+    def create_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.CreateBucketRequest],
+        Union[
+            logging_config.LogBucket,
+            Awaitable[logging_config.LogBucket]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_bucket(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            Union[
-                logging_config.LogBucket,
-                Awaitable[logging_config.LogBucket]
-            ]]:
+    def update_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateBucketRequest],
+        Union[
+            logging_config.LogBucket,
+            Awaitable[logging_config.LogBucket]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def delete_bucket(self) -> Callable[
-            [logging_config.DeleteBucketRequest],
-            Union[
-                empty_pb2.Empty,
-                Awaitable[empty_pb2.Empty]
-            ]]:
+    def delete_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.DeleteBucketRequest],
+        Union[
+            empty_pb2.Empty,
+            Awaitable[empty_pb2.Empty]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def undelete_bucket(self) -> Callable[
-            [logging_config.UndeleteBucketRequest],
-            Union[
-                empty_pb2.Empty,
-                Awaitable[empty_pb2.Empty]
-            ]]:
+    def undelete_bucket(
+        self,
+    ) -> Callable[
+        [logging_config.UndeleteBucketRequest],
+        Union[
+            empty_pb2.Empty,
+            Awaitable[empty_pb2.Empty]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def list_views(self) -> Callable[
-            [logging_config.ListViewsRequest],
-            Union[
-                logging_config.ListViewsResponse,
-                Awaitable[logging_config.ListViewsResponse]
-            ]]:
+    def list_views(
+        self,
+    ) -> Callable[
+        [logging_config.ListViewsRequest],
+        Union[
+            logging_config.ListViewsResponse,
+            Awaitable[logging_config.ListViewsResponse]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_view(self) -> Callable[
-            [logging_config.GetViewRequest],
-            Union[
-                logging_config.LogView,
-                Awaitable[logging_config.LogView]
-            ]]:
+    def get_view(
+        self,
+    ) -> Callable[
+        [logging_config.GetViewRequest],
+        Union[
+            logging_config.LogView,
+            Awaitable[logging_config.LogView]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_view(self) -> Callable[
-            [logging_config.CreateViewRequest],
-            Union[
-                logging_config.LogView,
-                Awaitable[logging_config.LogView]
-            ]]:
+    def create_view(
+        self,
+    ) -> Callable[
+        [logging_config.CreateViewRequest],
+        Union[
+            logging_config.LogView,
+            Awaitable[logging_config.LogView]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_view(self) -> Callable[
-            [logging_config.UpdateViewRequest],
-            Union[
-                logging_config.LogView,
-                Awaitable[logging_config.LogView]
-            ]]:
+    def update_view(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateViewRequest],
+        Union[
+            logging_config.LogView,
+            Awaitable[logging_config.LogView]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def delete_view(self) -> Callable[
-            [logging_config.DeleteViewRequest],
-            Union[
-                empty_pb2.Empty,
-                Awaitable[empty_pb2.Empty]
-            ]]:
+    def delete_view(
+        self,
+    ) -> Callable[
+        [logging_config.DeleteViewRequest],
+        Union[
+            empty_pb2.Empty,
+            Awaitable[empty_pb2.Empty]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def list_sinks(self) -> Callable[
-            [logging_config.ListSinksRequest],
-            Union[
-                logging_config.ListSinksResponse,
-                Awaitable[logging_config.ListSinksResponse]
-            ]]:
+    def list_sinks(
+        self,
+    ) -> Callable[
+        [logging_config.ListSinksRequest],
+        Union[
+            logging_config.ListSinksResponse,
+            Awaitable[logging_config.ListSinksResponse]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_sink(self) -> Callable[
-            [logging_config.GetSinkRequest],
-            Union[
-                logging_config.LogSink,
-                Awaitable[logging_config.LogSink]
-            ]]:
+    def get_sink(
+        self,
+    ) -> Callable[
+        [logging_config.GetSinkRequest],
+        Union[
+            logging_config.LogSink,
+            Awaitable[logging_config.LogSink]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_sink(self) -> Callable[
-            [logging_config.CreateSinkRequest],
-            Union[
-                logging_config.LogSink,
-                Awaitable[logging_config.LogSink]
-            ]]:
+    def create_sink(
+        self,
+    ) -> Callable[
+        [logging_config.CreateSinkRequest],
+        Union[
+            logging_config.LogSink,
+            Awaitable[logging_config.LogSink]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_sink(self) -> Callable[
-            [logging_config.UpdateSinkRequest],
-            Union[
-                logging_config.LogSink,
-                Awaitable[logging_config.LogSink]
-            ]]:
+    def update_sink(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateSinkRequest],
+        Union[
+            logging_config.LogSink,
+            Awaitable[logging_config.LogSink]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def delete_sink(self) -> Callable[
-            [logging_config.DeleteSinkRequest],
-            Union[
-                empty_pb2.Empty,
-                Awaitable[empty_pb2.Empty]
-            ]]:
+    def delete_sink(
+        self,
+    ) -> Callable[
+        [logging_config.DeleteSinkRequest],
+        Union[
+            empty_pb2.Empty,
+            Awaitable[empty_pb2.Empty]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_link(self) -> Callable[
-            [logging_config.CreateLinkRequest],
-            Union[
-                operations_pb2.Operation,
-                Awaitable[operations_pb2.Operation]
-            ]]:
+    def create_link(
+        self,
+    ) -> Callable[
+        [logging_config.CreateLinkRequest],
+        Union[
+            operations_pb2.Operation,
+            Awaitable[operations_pb2.Operation]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def delete_link(self) -> Callable[
-            [logging_config.DeleteLinkRequest],
-            Union[
-                operations_pb2.Operation,
-                Awaitable[operations_pb2.Operation]
-            ]]:
+    def delete_link(
+        self,
+    ) -> Callable[
+        [logging_config.DeleteLinkRequest],
+        Union[
+            operations_pb2.Operation,
+            Awaitable[operations_pb2.Operation]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def list_links(self) -> Callable[
-            [logging_config.ListLinksRequest],
-            Union[
-                logging_config.ListLinksResponse,
-                Awaitable[logging_config.ListLinksResponse]
-            ]]:
+    def list_links(
+        self,
+    ) -> Callable[
+        [logging_config.ListLinksRequest],
+        Union[
+            logging_config.ListLinksResponse,
+            Awaitable[logging_config.ListLinksResponse]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_link(self) -> Callable[
-            [logging_config.GetLinkRequest],
-            Union[
-                logging_config.Link,
-                Awaitable[logging_config.Link]
-            ]]:
+    def get_link(
+        self,
+    ) -> Callable[
+        [logging_config.GetLinkRequest],
+        Union[
+            logging_config.Link,
+            Awaitable[logging_config.Link]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def list_exclusions(self) -> Callable[
-            [logging_config.ListExclusionsRequest],
-            Union[
-                logging_config.ListExclusionsResponse,
-                Awaitable[logging_config.ListExclusionsResponse]
-            ]]:
+    def list_exclusions(
+        self,
+    ) -> Callable[
+        [logging_config.ListExclusionsRequest],
+        Union[
+            logging_config.ListExclusionsResponse,
+            Awaitable[logging_config.ListExclusionsResponse]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_exclusion(self) -> Callable[
-            [logging_config.GetExclusionRequest],
-            Union[
-                logging_config.LogExclusion,
-                Awaitable[logging_config.LogExclusion]
-            ]]:
+    def get_exclusion(
+        self,
+    ) -> Callable[
+        [logging_config.GetExclusionRequest],
+        Union[
+            logging_config.LogExclusion,
+            Awaitable[logging_config.LogExclusion]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def create_exclusion(self) -> Callable[
-            [logging_config.CreateExclusionRequest],
-            Union[
-                logging_config.LogExclusion,
-                Awaitable[logging_config.LogExclusion]
-            ]]:
+    def create_exclusion(
+        self,
+    ) -> Callable[
+        [logging_config.CreateExclusionRequest],
+        Union[
+            logging_config.LogExclusion,
+            Awaitable[logging_config.LogExclusion]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_exclusion(self) -> Callable[
-            [logging_config.UpdateExclusionRequest],
-            Union[
-                logging_config.LogExclusion,
-                Awaitable[logging_config.LogExclusion]
-            ]]:
+    def update_exclusion(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateExclusionRequest],
+        Union[
+            logging_config.LogExclusion,
+            Awaitable[logging_config.LogExclusion]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def delete_exclusion(self) -> Callable[
-            [logging_config.DeleteExclusionRequest],
-            Union[
-                empty_pb2.Empty,
-                Awaitable[empty_pb2.Empty]
-            ]]:
+    def delete_exclusion(
+        self,
+    ) -> Callable[
+        [logging_config.DeleteExclusionRequest],
+        Union[
+            empty_pb2.Empty,
+            Awaitable[empty_pb2.Empty]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_cmek_settings(self) -> Callable[
-            [logging_config.GetCmekSettingsRequest],
-            Union[
-                logging_config.CmekSettings,
-                Awaitable[logging_config.CmekSettings]
-            ]]:
+    def get_cmek_settings(
+        self,
+    ) -> Callable[
+        [logging_config.GetCmekSettingsRequest],
+        Union[
+            logging_config.CmekSettings,
+            Awaitable[logging_config.CmekSettings]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_cmek_settings(self) -> Callable[
-            [logging_config.UpdateCmekSettingsRequest],
-            Union[
-                logging_config.CmekSettings,
-                Awaitable[logging_config.CmekSettings]
-            ]]:
+    def update_cmek_settings(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateCmekSettingsRequest],
+        Union[
+            logging_config.CmekSettings,
+            Awaitable[logging_config.CmekSettings]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def get_settings(self) -> Callable[
-            [logging_config.GetSettingsRequest],
-            Union[
-                logging_config.Settings,
-                Awaitable[logging_config.Settings]
-            ]]:
+    def get_settings(
+        self,
+    ) -> Callable[
+        [logging_config.GetSettingsRequest],
+        Union[
+            logging_config.Settings,
+            Awaitable[logging_config.Settings]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def update_settings(self) -> Callable[
-            [logging_config.UpdateSettingsRequest],
-            Union[
-                logging_config.Settings,
-                Awaitable[logging_config.Settings]
-            ]]:
+    def update_settings(
+        self,
+    ) -> Callable[
+        [logging_config.UpdateSettingsRequest],
+        Union[
+            logging_config.Settings,
+            Awaitable[logging_config.Settings]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
+    # fmt: off
     @property
-    def copy_log_entries(self) -> Callable[
-            [logging_config.CopyLogEntriesRequest],
-            Union[
-                operations_pb2.Operation,
-                Awaitable[operations_pb2.Operation]
-            ]]:
+    def copy_log_entries(
+        self,
+    ) -> Callable[
+        [logging_config.CopyLogEntriesRequest],
+        Union[
+            operations_pb2.Operation,
+            Awaitable[operations_pb2.Operation]
+        ],
+    ]:
         raise NotImplementedError()
+    # fmt: on
 
     @property
     def list_operations(
@@ -699,19 +866,13 @@ def list_operations(
     @property
     def get_operation(
         self,
-    ) -> Callable[
-        [operations_pb2.GetOperationRequest],
-        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
-    ]:
+    ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]:
         raise NotImplementedError()
 
     @property
     def cancel_operation(
         self,
-    ) -> Callable[
-        [operations_pb2.CancelOperationRequest],
-        None,
-    ]:
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
         raise NotImplementedError()
 
     @property
@@ -719,6 +880,4 @@ def kind(self) -> str:
         raise NotImplementedError()
 
 
-__all__ = (
-    'ConfigServiceV2Transport',
-)
+__all__ = ("ConfigServiceV2Transport",)
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
index ac3a4393a8..d911a43a62 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
@@ -22,7 +22,7 @@
 from google.api_core import grpc_helpers
 from google.api_core import operations_v1
 from google.api_core import gapic_v1
-import google.auth # type: ignore
+import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.protobuf.json_format import MessageToJson
@@ -32,12 +32,13 @@
 import proto  # type: ignore
 
 from google.cloud.logging_v2.types import logging_config
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
 
 try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -57,10 +58,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         else:
             request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
 
+        # fmt: off
         request_metadata = {
             key: value.decode("utf-8") if isinstance(value, bytes) else value
             for key, value in request_metadata
         }
+        # fmt: on
         grpc_request = {
             "payload": request_payload,
             "requestMethod": "grpc",
@@ -68,7 +71,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         }
         _LOGGER.debug(
             f"Sending request for {client_call_details.method}",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.ConfigServiceV2",
                 "rpcName": str(client_call_details.method),
                 "request": grpc_request,
@@ -94,7 +97,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         }
         _LOGGER.debug(
             f"Received response for {client_call_details.method}.",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.ConfigServiceV2",
                 "rpcName": client_call_details.method,
                 "response": grpc_response,
@@ -116,28 +119,31 @@ class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
     It sends protocol buffers over the wire using gRPC (which is built
     on top of HTTP/2); the ``grpcio`` package must be installed.
     """
+
     _stubs: Dict[str, Callable]
 
-    def __init__(self, *,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.
 
         Args:
             host (Optional[str]):
-                The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests.
                 These credentials identify the application to the service; if none
@@ -212,7 +218,8 @@ def __init__(
             if client_cert_source:
                 cert, key = client_cert_source()
                 self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
+                    certificate_chain=cert,
+                    private_key=key,
                 )
             else:
                 self._ssl_channel_credentials = SslCredentials().ssl_credentials
@@ -221,7 +228,8 @@ def __init__(
         if client_cert_source_for_mtls and not ssl_channel_credentials:
             cert, key = client_cert_source_for_mtls()
             self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                certificate_chain=cert, private_key=key
+                certificate_chain=cert,
+                private_key=key,
             )
 
         # The base transport sets the host, credentials and scopes
@@ -256,19 +264,21 @@ def __init__(
         )
 
         self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
         # Wrap messages. This must be done after self._logged_channel exists
         self._prep_wrapped_messages(client_info)
 
     @classmethod
-    def create_channel(cls,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> grpc.Channel:
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
         """Create and return a gRPC channel object.
 
         Args:
             host (Optional[str]): The host for the channel to use.
@@ -304,13 +314,12 @@ def create_channel(
             default_scopes=cls.AUTH_SCOPES,
             scopes=scopes,
             default_host=cls.DEFAULT_HOST,
-            **kwargs
+            **kwargs,
         )
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel
 
     @property
@@ -322,17 +331,15 @@ def operations_client(self) -> operations_v1.OperationsClient:
         """
         # Quick check: Only create a new client if we do not already have one.
         if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsClient(
-                self._logged_channel
-            )
+            self._operations_client = operations_v1.OperationsClient(self._logged_channel)
 
         # Return the client from cache.
         return self._operations_client
 
     @property
-    def list_buckets(self) -> Callable[
-            [logging_config.ListBucketsRequest],
-            logging_config.ListBucketsResponse]:
+    def list_buckets(
+        self,
+    ) -> Callable[[logging_config.ListBucketsRequest], logging_config.ListBucketsResponse]:
         r"""Return a callable for the list buckets method over gRPC.
 
         Lists log buckets.
@@ -347,18 +354,18 @@ def list_buckets(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_buckets' not in self._stubs:
-            self._stubs['list_buckets'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListBuckets',
+        if "list_buckets" not in self._stubs:
+            self._stubs["list_buckets"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListBuckets",
                 request_serializer=logging_config.ListBucketsRequest.serialize,
                 response_deserializer=logging_config.ListBucketsResponse.deserialize,
             )
-        return self._stubs['list_buckets']
+        return self._stubs["list_buckets"]
 
     @property
-    def get_bucket(self) -> Callable[
-            [logging_config.GetBucketRequest],
-            logging_config.LogBucket]:
+    def get_bucket(
+        self,
+    ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]:
         r"""Return a callable for the get bucket method over gRPC.
 
         Gets a log bucket.
@@ -373,18 +380,18 @@ def get_bucket(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
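Every stub property in the remaining hunks repeats the lazy-cache pattern just shown for `list_buckets`, only re-quoted. Generically, the pattern is (service and method names here are placeholders, not from this diff):

    @property
    def example_method(self):
        # Create the gRPC stub once, on first use, and cache it; gRPC handles
        # (de)serialization via the serializer callables registered here.
        if "example_method" not in self._stubs:
            self._stubs["example_method"] = self._logged_channel.unary_unary(
                "/example.v1.ExampleService/ExampleMethod",
                request_serializer=ExampleRequest.serialize,
                response_deserializer=ExampleResponse.deserialize,
            )
        return self._stubs["example_method"]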
-        if 'get_bucket' not in self._stubs:
-            self._stubs['get_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetBucket',
+        if "get_bucket" not in self._stubs:
+            self._stubs["get_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetBucket",
                 request_serializer=logging_config.GetBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['get_bucket']
+        return self._stubs["get_bucket"]
 
     @property
-    def create_bucket_async(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            operations_pb2.Operation]:
+    def create_bucket_async(
+        self,
+    ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]:
         r"""Return a callable for the create bucket async method over gRPC.
 
         Creates a log bucket asynchronously that can be used
@@ -402,18 +409,18 @@ def create_bucket_async(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_bucket_async' not in self._stubs:
-            self._stubs['create_bucket_async'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateBucketAsync',
+        if "create_bucket_async" not in self._stubs:
+            self._stubs["create_bucket_async"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateBucketAsync",
                 request_serializer=logging_config.CreateBucketRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['create_bucket_async']
+        return self._stubs["create_bucket_async"]
 
     @property
-    def update_bucket_async(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            operations_pb2.Operation]:
+    def update_bucket_async(
+        self,
+    ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]:
         r"""Return a callable for the update bucket async method over gRPC.
 
         Updates a log bucket asynchronously.
@@ -434,18 +441,18 @@ def update_bucket_async(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
-        if 'update_bucket_async' not in self._stubs:
-            self._stubs['update_bucket_async'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync',
+        if "update_bucket_async" not in self._stubs:
+            self._stubs["update_bucket_async"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync",
                 request_serializer=logging_config.UpdateBucketRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['update_bucket_async']
+        return self._stubs["update_bucket_async"]
 
     @property
-    def create_bucket(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            logging_config.LogBucket]:
+    def create_bucket(
+        self,
+    ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]:
         r"""Return a callable for the create bucket method over gRPC.
 
         Creates a log bucket that can be used to store log
@@ -462,18 +469,18 @@ def create_bucket(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_bucket' not in self._stubs:
-            self._stubs['create_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateBucket',
+        if "create_bucket" not in self._stubs:
+            self._stubs["create_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateBucket",
                 request_serializer=logging_config.CreateBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['create_bucket']
+        return self._stubs["create_bucket"]
 
     @property
-    def update_bucket(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            logging_config.LogBucket]:
+    def update_bucket(
+        self,
+    ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]:
         r"""Return a callable for the update bucket method over gRPC.
 
         Updates a log bucket.
@@ -494,18 +501,18 @@ def update_bucket(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_bucket' not in self._stubs:
-            self._stubs['update_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateBucket',
+        if "update_bucket" not in self._stubs:
+            self._stubs["update_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucket",
                 request_serializer=logging_config.UpdateBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['update_bucket']
+        return self._stubs["update_bucket"]
 
     @property
-    def delete_bucket(self) -> Callable[
-            [logging_config.DeleteBucketRequest],
-            empty_pb2.Empty]:
+    def delete_bucket(
+        self,
+    ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]:
         r"""Return a callable for the delete bucket method over gRPC.
 
         Deletes a log bucket.
@@ -525,18 +532,18 @@ def delete_bucket(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_bucket' not in self._stubs:
-            self._stubs['delete_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteBucket',
+        if "delete_bucket" not in self._stubs:
+            self._stubs["delete_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteBucket",
                 request_serializer=logging_config.DeleteBucketRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_bucket']
+        return self._stubs["delete_bucket"]
 
     @property
-    def undelete_bucket(self) -> Callable[
-            [logging_config.UndeleteBucketRequest],
-            empty_pb2.Empty]:
+    def undelete_bucket(
+        self,
+    ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]:
         r"""Return a callable for the undelete bucket method over gRPC.
 
         Undeletes a log bucket. A bucket that has been
@@ -553,18 +560,18 @@ def undelete_bucket(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'undelete_bucket' not in self._stubs:
-            self._stubs['undelete_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UndeleteBucket',
+        if "undelete_bucket" not in self._stubs:
+            self._stubs["undelete_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UndeleteBucket",
                 request_serializer=logging_config.UndeleteBucketRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['undelete_bucket']
+        return self._stubs["undelete_bucket"]
 
     @property
-    def list_views(self) -> Callable[
-            [logging_config.ListViewsRequest],
-            logging_config.ListViewsResponse]:
+    def list_views(
+        self,
+    ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]:
         r"""Return a callable for the list views method over gRPC.
 
         Lists views on a log bucket.
@@ -579,18 +586,18 @@ def list_views(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_views' not in self._stubs:
-            self._stubs['list_views'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListViews',
+        if "list_views" not in self._stubs:
+            self._stubs["list_views"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListViews",
                 request_serializer=logging_config.ListViewsRequest.serialize,
                 response_deserializer=logging_config.ListViewsResponse.deserialize,
             )
-        return self._stubs['list_views']
+        return self._stubs["list_views"]
 
     @property
-    def get_view(self) -> Callable[
-            [logging_config.GetViewRequest],
-            logging_config.LogView]:
+    def get_view(
+        self,
+    ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]:
         r"""Return a callable for the get view method over gRPC.
 
         Gets a view on a log bucket..
@@ -605,18 +612,18 @@ def get_view(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_view' not in self._stubs:
-            self._stubs['get_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetView',
+        if "get_view" not in self._stubs:
+            self._stubs["get_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetView",
                 request_serializer=logging_config.GetViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['get_view']
+        return self._stubs["get_view"]
 
     @property
-    def create_view(self) -> Callable[
-            [logging_config.CreateViewRequest],
-            logging_config.LogView]:
+    def create_view(
+        self,
+    ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]:
         r"""Return a callable for the create view method over gRPC.
 
         Creates a view over log entries in a log bucket. A
@@ -632,18 +639,18 @@ def create_view(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_view' not in self._stubs:
-            self._stubs['create_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateView',
+        if "create_view" not in self._stubs:
+            self._stubs["create_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateView",
                 request_serializer=logging_config.CreateViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['create_view']
+        return self._stubs["create_view"]
 
     @property
-    def update_view(self) -> Callable[
-            [logging_config.UpdateViewRequest],
-            logging_config.LogView]:
+    def update_view(
+        self,
+    ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]:
         r"""Return a callable for the update view method over gRPC.
 
         Updates a view on a log bucket. This method replaces the
@@ -662,18 +669,18 @@ def update_view(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_view' not in self._stubs:
-            self._stubs['update_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateView',
+        if "update_view" not in self._stubs:
+            self._stubs["update_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateView",
                 request_serializer=logging_config.UpdateViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['update_view']
+        return self._stubs["update_view"]
 
     @property
-    def delete_view(self) -> Callable[
-            [logging_config.DeleteViewRequest],
-            empty_pb2.Empty]:
+    def delete_view(
+        self,
+    ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]:
         r"""Return a callable for the delete view method over gRPC.
 
         Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is
@@ -691,18 +698,18 @@ def delete_view(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_view' not in self._stubs:
-            self._stubs['delete_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteView',
+        if "delete_view" not in self._stubs:
+            self._stubs["delete_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteView",
                 request_serializer=logging_config.DeleteViewRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_view']
+        return self._stubs["delete_view"]
 
     @property
-    def list_sinks(self) -> Callable[
-            [logging_config.ListSinksRequest],
-            logging_config.ListSinksResponse]:
+    def list_sinks(
+        self,
+    ) -> Callable[[logging_config.ListSinksRequest], logging_config.ListSinksResponse]:
         r"""Return a callable for the list sinks method over gRPC.
 
         Lists sinks.
@@ -717,18 +724,18 @@ def list_sinks(
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
- if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], logging_config.LogSink]: r"""Return a callable for the get sink method over gRPC. Gets a sink. @@ -743,18 +750,18 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: + def create_sink( + self, + ) -> Callable[[logging_config.CreateSinkRequest], logging_config.LogSink]: r"""Return a callable for the create sink method over gRPC. Creates a sink that exports specified log entries to a @@ -773,18 +780,18 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: + def update_sink( + self, + ) -> Callable[[logging_config.UpdateSinkRequest], logging_config.LogSink]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -804,18 +811,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
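# The quote flips in these hunks ('get_sink' -> "get_sink") are black's
# string normalization, not a behavior change. A sketch of the same
# rewrite driven through black's Python API, assuming black is installed:

import black

src = "self._stubs['get_sink'] = channel.unary_unary('/Svc/GetSink')\n"
formatted = black.format_str(src, mode=black.Mode())
# formatted == 'self._stubs["get_sink"] = channel.unary_unary("/Svc/GetSink")\n'
print(formatted)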
- if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -831,18 +838,18 @@ def delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - operations_pb2.Operation]: + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -860,18 +867,18 @@ def create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - operations_pb2.Operation]: + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]: r"""Return a callable for the delete link method over gRPC. Deletes a link. This will also delete the @@ -887,18 +894,18 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - logging_config.ListLinksResponse]: + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -913,18 +920,18 @@ def list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - logging_config.Link]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]: r"""Return a callable for the get link method over gRPC. Gets a link. @@ -939,18 +946,18 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: + def list_exclusions( + self, + ) -> Callable[[logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -966,18 +973,18 @@ def list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: + def get_exclusion( + self, + ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -992,18 +999,18 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: + def create_exclusion( + self, + ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. Creates a new exclusion in the \_Default sink in a specified @@ -1020,18 +1027,18 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: + def update_exclusion( + self, + ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. Changes one or more properties of an existing exclusion in the @@ -1047,18 +1054,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
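# The signature rewrites in these hunks are black's line-length handling:
# when a property's return annotation would overflow the line limit, the
# parameter list is exploded onto its own lines, and the magic trailing
# comma keeps it exploded on later runs. Both spellings below are
# identical to the type checker; Req/Resp are hypothetical message types.

from typing import Callable

class Req: ...
class Resp: ...

class Transport:
    def get_thing(self) -> Callable[[Req], Resp]:  # pre-format spelling
        raise NotImplementedError

    def get_thing_formatted(
        self,
    ) -> Callable[[Req], Resp]:  # post-format spelling, as in this diff
        raise NotImplementedError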
- if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1073,18 +1080,18 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: + def get_cmek_settings( + self, + ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. Gets the Logging CMEK settings for the given resource. @@ -1108,18 +1115,18 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: + def update_cmek_settings( + self, + ) -> Callable[[logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the update cmek settings method over gRPC. Updates the Log Router CMEK settings for the given resource. @@ -1148,18 +1155,18 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - logging_config.Settings]: + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1184,18 +1191,18 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - logging_config.Settings]: + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]: r"""Return a callable for the update settings method over gRPC. Updates the Log Router settings for the given resource. @@ -1227,18 +1234,18 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - operations_pb2.Operation]: + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: r"""Return a callable for the copy log entries method over gRPC. Copies a set of log entries from a log bucket to a @@ -1254,13 +1261,13 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def close(self): self._logged_channel.close() @@ -1269,8 +1276,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1287,8 +1293,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1305,8 +1310,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1324,6 +1328,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'ConfigServiceV2GrpcTransport', -) +__all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index fc7af0c06b..a1ab5c8294 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -25,23 +25,24 @@ from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -61,10 +62,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -98,7 +101,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -125,13 +128,15 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + 
quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -162,29 +167,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -259,7 +266,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -268,7 +276,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -328,17 +337,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. 
return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Awaitable[logging_config.ListBucketsResponse]]: + def list_buckets( + self, + ) -> Callable[[logging_config.ListBucketsRequest], Awaitable[logging_config.ListBucketsResponse]]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -353,18 +360,18 @@ def list_buckets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Awaitable[logging_config.LogBucket]]: + def get_bucket( + self, + ) -> Callable[[logging_config.GetBucketRequest], Awaitable[logging_config.LogBucket]]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -379,18 +386,18 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create bucket async method over gRPC. Creates a log bucket asynchronously that can be used @@ -408,18 +415,18 @@ def create_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[operations_pb2.Operation]]: + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update bucket async method over gRPC. 
Updates a log bucket asynchronously. @@ -440,18 +447,18 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket]]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -468,18 +475,18 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Awaitable[logging_config.LogBucket]]: + def update_bucket( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], Awaitable[logging_config.LogBucket]]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. @@ -500,18 +507,18 @@ def update_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -531,18 +538,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
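# In this asyncio transport every multicallable returns an awaitable,
# which the annotations spell as Callable[[Request], Awaitable[Response]].
# A sketch of consuming one over grpc's aio API; the endpoint and method
# path are placeholders, and a live server is assumed for the call to
# succeed.

import asyncio
from grpc.experimental import aio

async def main() -> None:
    async with aio.insecure_channel("localhost:50051") as channel:
        delete_bucket = channel.unary_unary("/example.v1.Buckets/DeleteBucket")
        # The stub call returns an awaitable gRPC call object.
        await delete_bucket(b"")

asyncio.run(main())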
- if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Awaitable[empty_pb2.Empty]]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. A bucket that has been @@ -559,18 +566,18 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Awaitable[logging_config.ListViewsResponse]]: + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse]]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -585,18 +592,18 @@ def list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Awaitable[logging_config.LogView]]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. Gets a view on a log bucket.. @@ -611,18 +618,18 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Awaitable[logging_config.LogView]]: + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -638,18 +645,18 @@ def create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Awaitable[logging_config.LogView]]: + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. This method replaces the @@ -668,18 +675,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Awaitable[empty_pb2.Empty]]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -697,18 +704,18 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
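# The interceptor at the top of this file logs request and response
# payloads through the stdlib logger, carrying structured fields in
# "extra". A sketch of that pattern; the field names mirror the
# interceptor hunk, while the logger name and values are illustrative.

import logging

_LOGGER = logging.getLogger("example.grpc")

grpc_request = {
    "payload": "{}",
    "requestMethod": "grpc",
    "metadata": {},
}
_LOGGER.debug(
    "Sending request for /google.logging.v2.ConfigServiceV2/GetView",
    extra={
        "serviceName": "google.logging.v2.ConfigServiceV2",
        "rpcName": "GetView",
        "request": grpc_request,
    },
)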
- if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Awaitable[logging_config.ListSinksResponse]]: + def list_sinks( + self, + ) -> Callable[[logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse]]: r"""Return a callable for the list sinks method over gRPC. Lists sinks. @@ -723,18 +730,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListSinks', + if "list_sinks" not in self._stubs: + self._stubs["list_sinks"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListSinks", request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['list_sinks'] + return self._stubs["list_sinks"] @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Awaitable[logging_config.LogSink]]: + def get_sink( + self, + ) -> Callable[[logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the get sink method over gRPC. Gets a sink. @@ -749,18 +756,18 @@ def get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSink', + if "get_sink" not in self._stubs: + self._stubs["get_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSink", request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['get_sink'] + return self._stubs["get_sink"] @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Awaitable[logging_config.LogSink]]: + def create_sink( + self, + ) -> Callable[[logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the create sink method over gRPC. Creates a sink that exports specified log entries to a @@ -779,18 +786,18 @@ def create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateSink', + if "create_sink" not in self._stubs: + self._stubs["create_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateSink", request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['create_sink'] + return self._stubs["create_sink"] @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Awaitable[logging_config.LogSink]]: + def update_sink( + self, + ) -> Callable[[logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the update sink method over gRPC. Updates a sink. This method replaces the following fields in the @@ -810,18 +817,18 @@ def update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSink', + if "update_sink" not in self._stubs: + self._stubs["update_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSink", request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['update_sink'] + return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Awaitable[empty_pb2.Empty]]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -837,18 +844,18 @@ def delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteSink', + if "delete_sink" not in self._stubs: + self._stubs["delete_sink"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_sink'] + return self._stubs["delete_sink"] @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Awaitable[operations_pb2.Operation]]: + def create_link( + self, + ) -> Callable[[logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create link method over gRPC. Asynchronously creates a linked dataset in BigQuery @@ -866,18 +873,18 @@ def create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
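# The interceptor hunk earlier in this file wraps the request-metadata
# dict comprehension in "# fmt: off" / "# fmt: on" so black leaves that
# hand-tuned layout alone. A sketch of the directive's effect on a
# similar snippet:

# fmt: off
request_metadata = [("x-goog-request-params", b"name=projects/p")]
metadata = {
    key: value.decode("utf-8") if isinstance(value, bytes) else value
    for key, value in request_metadata
}
# fmt: on
# Black reformats everything outside the fmt: off/on fence but leaves the
# fenced lines untouched.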
- if 'create_link' not in self._stubs: - self._stubs['create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateLink', + if "create_link" not in self._stubs: + self._stubs["create_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateLink", request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_link'] + return self._stubs["create_link"] @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Awaitable[operations_pb2.Operation]]: + def delete_link( + self, + ) -> Callable[[logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete link method over gRPC. Deletes a link. This will also delete the @@ -893,18 +900,18 @@ def delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteLink', + if "delete_link" not in self._stubs: + self._stubs["delete_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteLink", request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_link'] + return self._stubs["delete_link"] @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Awaitable[logging_config.ListLinksResponse]]: + def list_links( + self, + ) -> Callable[[logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse]]: r"""Return a callable for the list links method over gRPC. Lists links. @@ -919,18 +926,18 @@ def list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_links' not in self._stubs: - self._stubs['list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListLinks', + if "list_links" not in self._stubs: + self._stubs["list_links"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListLinks", request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['list_links'] + return self._stubs["list_links"] @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Awaitable[logging_config.Link]]: + def get_link( + self, + ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: r"""Return a callable for the get link method over gRPC. Gets a link. @@ -945,18 +952,18 @@ def get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_link' not in self._stubs: - self._stubs['get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetLink', + if "get_link" not in self._stubs: + self._stubs["get_link"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetLink", request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['get_link'] + return self._stubs["get_link"] @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Awaitable[logging_config.ListExclusionsResponse]]: + def list_exclusions( + self, + ) -> Callable[[logging_config.ListExclusionsRequest], Awaitable[logging_config.ListExclusionsResponse]]: r"""Return a callable for the list exclusions method over gRPC. Lists all the exclusions on the \_Default sink in a parent @@ -972,18 +979,18 @@ def list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListExclusions', + if "list_exclusions" not in self._stubs: + self._stubs["list_exclusions"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListExclusions", request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['list_exclusions'] + return self._stubs["list_exclusions"] @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def get_exclusion( + self, + ) -> Callable[[logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the get exclusion method over gRPC. Gets the description of an exclusion in the \_Default sink. @@ -998,18 +1005,18 @@ def get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetExclusion', + if "get_exclusion" not in self._stubs: + self._stubs["get_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetExclusion", request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['get_exclusion'] + return self._stubs["get_exclusion"] @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def create_exclusion( + self, + ) -> Callable[[logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the create exclusion method over gRPC. Creates a new exclusion in the \_Default sink in a specified @@ -1026,18 +1033,18 @@ def create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateExclusion', + if "create_exclusion" not in self._stubs: + self._stubs["create_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateExclusion", request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['create_exclusion'] + return self._stubs["create_exclusion"] @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Awaitable[logging_config.LogExclusion]]: + def update_exclusion( + self, + ) -> Callable[[logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the update exclusion method over gRPC. Changes one or more properties of an existing exclusion in the @@ -1053,18 +1060,18 @@ def update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + if "update_exclusion" not in self._stubs: + self._stubs["update_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateExclusion", request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['update_exclusion'] + return self._stubs["update_exclusion"] @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Awaitable[empty_pb2.Empty]]: + def delete_exclusion( + self, + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion in the \_Default sink. @@ -1079,18 +1086,18 @@ def delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + if "delete_exclusion" not in self._stubs: + self._stubs["delete_exclusion"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_exclusion'] + return self._stubs["delete_exclusion"] @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def get_cmek_settings( + self, + ) -> Callable[[logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the get cmek settings method over gRPC. Gets the Logging CMEK settings for the given resource. @@ -1114,18 +1121,18 @@ def get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + if "get_cmek_settings" not in self._stubs: + self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetCmekSettings", request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['get_cmek_settings'] + return self._stubs["get_cmek_settings"] @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Awaitable[logging_config.CmekSettings]]: + def update_cmek_settings( + self, + ) -> Callable[[logging_config.UpdateCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the update cmek settings method over gRPC. Updates the Log Router CMEK settings for the given resource. @@ -1154,18 +1161,18 @@ def update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + if "update_cmek_settings" not in self._stubs: + self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings", request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['update_cmek_settings'] + return self._stubs["update_cmek_settings"] @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Awaitable[logging_config.Settings]]: + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], Awaitable[logging_config.Settings]]: r"""Return a callable for the get settings method over gRPC. Gets the Log Router settings for the given resource. @@ -1190,18 +1197,18 @@ def get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetSettings', + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['get_settings'] + return self._stubs["get_settings"] @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Awaitable[logging_config.Settings]]: + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings]]: r"""Return a callable for the update settings method over gRPC. Updates the Log Router settings for the given resource. @@ -1233,18 +1240,18 @@ def update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
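# The _prep_wrapped_messages override further down precomputes a
# retry/timeout wrapper per RPC. A sketch of what a single wrapped method
# amounts to, using api_core's public helper; the retry parameters here
# are illustrative, not the generated defaults.

from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries

def call_rpc(request, **kwargs):
    """Stand-in for a transport stub; the wrapper injects timeout/metadata."""

wrapped = gapic_v1.method.wrap_method(
    call_rpc,
    default_retry=retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
        ),
    ),
    default_timeout=60.0,
    client_info=gapic_v1.client_info.ClientInfo(),
)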
- if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Awaitable[operations_pb2.Operation]]: + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the copy log entries method over gRPC. Copies a set of log entries from a log bucket to a @@ -1260,16 +1267,16 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_buckets: self._wrap_method( self.list_buckets, @@ -1541,8 +1548,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1559,8 +1565,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1577,8 +1582,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1592,6 +1596,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'ConfigServiceV2GrpcAsyncIOTransport', -) +__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index d9820f0906..41c0dc4fab 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import LoggingServiceV2AsyncClient __all__ = ( - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f84e9f6ec5..f821db9325 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -16,7 +16,21 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +38,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,19 +52,21 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -173,12 +189,14 @@ def universe_domain(self) -> str: get_transport_class = LoggingServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 async client. Args: @@ -233,31 +251,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - async def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -322,8 +342,7 @@ async def sample_delete_log(): flattened_params = [log_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -341,11 +360,13 @@ async def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("log_name", request.log_name), )), ) + # fmt: on # Validate the universe domain. 
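# [Annotation: illustrative sketch, not part of the patch] The `# fmt: off`
# / `# fmt: on` pair added around the routing header above is a formatter
# directive (Black honors it) that leaves the enclosed lines verbatim, so
# the nested-tuple layout survives reformatting. A standalone example with
# a literal value standing in for `request.log_name`:
metadata: tuple = ()
# fmt: off
metadata = tuple(metadata) + (
    (
        ("log_name", "projects/my-project/logs/my-log"),
    ),
)
# fmt: on
assert metadata == ((("log_name", "projects/my-project/logs/my-log"),),)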
self._client._validate_universe_domain() @@ -358,17 +379,18 @@ async def sample_delete_log(): metadata=metadata, ) - async def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + async def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -513,8 +535,7 @@ async def sample_write_log_entries(): flattened_params = [log_name, resource, labels, entries] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -551,16 +572,17 @@ async def sample_write_log_entries(): # Done; return the response. return response - async def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesAsyncPager: + async def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. 
For ways to export log entries, see `Exporting @@ -665,8 +687,7 @@ async def sample_list_log_entries(): flattened_params = [resource_names, filter, order_by] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -711,13 +732,14 @@ async def sample_list_log_entries(): # Done; return the response. return response - async def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + async def list_monitored_resource_descriptors( + self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -803,14 +825,15 @@ async def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - async def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsAsyncPager: + async def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -879,8 +902,7 @@ async def sample_list_logs(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -898,11 +920,13 @@ async def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -929,13 +953,14 @@ async def sample_list_logs(): # Done; return the response. 
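# [Annotation: illustrative sketch, not part of the patch] The ValueError
# rewrites in these hunks only join a wrapped message onto one line; the
# guard's logic is unchanged. What it enforces, as a standalone helper
# with a hypothetical name:
from typing import Any, Optional

def _reject_mixed_arguments(request: Optional[Any], *flattened: Any) -> None:
    # Callers may pass a prebuilt request object or individual flattened
    # fields, but never both at once.
    has_flattened_params = len([p for p in flattened if p is not None]) > 0
    if request is not None and has_flattened_params:
        raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

_reject_mixed_arguments(None, "projects/p/logs/l")  # ok: flattened only
_reject_mixed_arguments({"log_name": "x"}, None)    # ok: request only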
return response - def tail_log_entries(self, - requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -961,7 +986,7 @@ async def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] @@ -1048,17 +1073,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1100,17 +1126,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1155,16 +1182,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self @@ -1172,12 +1201,11 @@ async def __aenter__(self) -> "LoggingServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2AsyncClient", -) +__all__ = ("LoggingServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 8c37951cb5..fcad2a5e13 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -19,7 +19,21 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +42,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +56,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,7 +67,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -65,13 +80,15 @@ class LoggingServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -107,9 +124,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -118,16 +133,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -141,21 +155,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -190,7 +202,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): LoggingServiceV2Client: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -207,73 +220,116 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str,log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log, ) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod - def parse_log_path(path: str) -> Dict[str,str]: + def parse_log_path( + path: str, + ) -> Dict[str, str]: """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return 
"projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -305,8 +361,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = LoggingServiceV2Client._use_client_cert_effective() @@ -436,7 +494,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -476,12 +534,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 client. Args: @@ -538,12 +598,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables() self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. 
self._is_universe_domain_valid = False @@ -563,22 +623,22 @@ def __init__(self, *, if transport_provided: # transport is a LoggingServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(LoggingServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - LoggingServiceV2Client._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or LoggingServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -608,25 +668,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -691,8 +754,7 @@ def sample_delete_log(): flattened_params = [log_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -709,11 +771,13 @@ def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("log_name", request.log_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -726,17 +790,18 @@ def sample_delete_log(): metadata=metadata, ) - def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -881,8 +946,7 @@ def sample_write_log_entries(): flattened_params = [log_name, resource, labels, entries] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -917,16 +981,17 @@ def sample_write_log_entries(): # Done; return the response. return response - def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesPager: + def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. 
For ways to export log entries, see `Exporting @@ -1031,8 +1096,7 @@ def sample_list_log_entries(): flattened_params = [resource_names, filter, order_by] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1076,13 +1140,14 @@ def sample_list_log_entries(): # Done; return the response. return response - def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: + def list_monitored_resource_descriptors( + self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1168,14 +1233,15 @@ def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsPager: + def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -1244,8 +1310,7 @@ def sample_list_logs(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1262,11 +1327,13 @@ def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1293,13 +1360,14 @@ def sample_list_logs(): # Done; return the response. 
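# [Annotation: illustrative sketch, not part of the patch] The list_*
# methods in these hunks return pager objects that hide page-token
# bookkeeping behind plain iteration. A minimal synchronous model of that
# protocol, with hypothetical types; the real pagers also forward
# retry/timeout/metadata on each page fetch:
from typing import Callable, Iterator, List

class _Page:
    def __init__(self, items: List[str], next_page_token: str) -> None:
        self.items = items
        self.next_page_token = next_page_token

class _MiniPager:
    def __init__(self, fetch: Callable[[str], _Page]) -> None:
        self._fetch = fetch

    def __iter__(self) -> Iterator[str]:
        token = ""
        while True:
            page = self._fetch(token)
            yield from page.items
            # An empty token signals the final page.
            token = page.next_page_token
            if not token:
                break

pages = {"": _Page(["log-a", "log-b"], "t1"), "t1": _Page(["log-c"], "")}
assert list(_MiniPager(lambda token: pages[token])) == ["log-a", "log-b", "log-c"]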
return response - def tail_log_entries(self, - requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1325,7 +1393,7 @@ def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] @@ -1425,10 +1493,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1436,7 +1501,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1481,10 +1550,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1492,7 +1558,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1540,21 +1610,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
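# [Annotation: illustrative sketch, not part of the patch] The one-line
# metadata rewrites above depend on Python's single-element tuple syntax:
# the trailing comma is what makes the outer parentheses a 1-tuple rather
# than plain grouping. Hypothetical operation name for illustration:
single = (("name", "operations/sample-op"),)   # 1-tuple holding one pair
grouped = (("name", "operations/sample-op"))   # parentheses only; just the pair
assert isinstance(single, tuple) and len(single) == 1
assert grouped == ("name", "operations/sample-op")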
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -1562,6 +1629,4 @@ def cancel_operation( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2Client", -) +__all__ = ("LoggingServiceV2Client",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index ee183ce805..601b6ad237 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -46,14 +47,17 @@ class ListLogEntriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogEntriesResponse], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -94,7 +98,7 @@ def __iter__(self) -> Iterator[log_entry.LogEntry]: yield from page.entries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogEntriesAsyncPager: @@ -114,14 +118,17 @@ class ListLogEntriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -156,6 +163,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: @@ -165,7 +173,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsPager: @@ -185,14 +193,17 @@ class ListMonitoredResourceDescriptorsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -233,7 +244,7 @@ def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescripto yield from page.resource_descriptors def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsAsyncPager: @@ -253,14 +264,17 @@ class ListMonitoredResourceDescriptorsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -295,6 +309,7 @@ async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsR self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: @@ -304,7 +319,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsPager: @@ -324,14 +339,17 @@ class ListLogsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogsResponse], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -372,7 +390,7 @@ def __iter__(self) -> Iterator[str]: yield from page.log_names def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsAsyncPager: @@ -392,14 +410,17 @@ class ListLogsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogsResponse]], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -434,6 +455,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: @@ -443,4 +465,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 25058513ec..48f0b711cd 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] -_transport_registry['grpc'] = LoggingServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = LoggingServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport __all__ = ( - 'LoggingServiceV2Transport', - 'LoggingServiceV2GrpcTransport', - 'LoggingServiceV2GrpcAsyncIOTransport', + "LoggingServiceV2Transport", + "LoggingServiceV2GrpcTransport", + "LoggingServiceV2GrpcAsyncIOTransport", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index f9d40b5621..d6471e4ac4 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -24,11 +24,11 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -40,33 +40,36 @@ class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + 
"https://www.googleapis.com/auth/logging.write", ) + # fmt: on - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -102,10 +105,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -113,15 +116,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -242,70 +249,100 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() + # fmt: off @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log( + self, + ) -> Callable[ + [logging.DeleteLogRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Union[ - logging.WriteLogEntriesResponse, - Awaitable[logging.WriteLogEntriesResponse] - ]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], + Union[ + logging.WriteLogEntriesResponse, + Awaitable[logging.WriteLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Union[ - logging.ListLogEntriesResponse, - Awaitable[logging.ListLogEntriesResponse] - ]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], + Union[ + logging.ListLogEntriesResponse, + Awaitable[logging.ListLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Union[ - logging.ListMonitoredResourceDescriptorsResponse, - Awaitable[logging.ListMonitoredResourceDescriptorsResponse] - ]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Union[ + logging.ListMonitoredResourceDescriptorsResponse, + Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Union[ - logging.ListLogsResponse, - Awaitable[logging.ListLogsResponse] - ]]: + def list_logs( + self, + ) -> Callable[ + [logging.ListLogsRequest], + Union[ + logging.ListLogsResponse, + Awaitable[logging.ListLogsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Union[ - logging.TailLogEntriesResponse, - Awaitable[logging.TailLogEntriesResponse] - ]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], + Union[ + logging.TailLogEntriesResponse, + Awaitable[logging.TailLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -319,19 +356,13 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property @@ -339,6 +370,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'LoggingServiceV2Transport', -) +__all__ = ("LoggingServiceV2Transport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 
8edc617027..7d9e6f3c67 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -21,7 +21,7 @@ from google.api_core import grpc_helpers from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -31,12 +31,13 @@ import proto # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -56,10 +57,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -67,7 +70,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -93,7 +96,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,28 +118,31 @@ class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -210,7 +216,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -219,7 +226,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -254,19 +262,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -302,19 +312,18 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -333,18 +342,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], logging.WriteLogEntriesResponse]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -365,18 +374,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], logging.ListLogEntriesResponse]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. 
Use this method to retrieve log entries that @@ -394,18 +403,18 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[[logging.ListMonitoredResourceDescriptorsRequest], logging.ListMonitoredResourceDescriptorsResponse]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -422,18 +431,18 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], logging.ListLogsResponse]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -450,18 +459,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -478,13 +487,13 @@ def tail_log_entries(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def close(self): self._logged_channel.close() @@ -493,8 +502,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -511,8 +519,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -529,8 +536,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -548,6 +554,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'LoggingServiceV2GrpcTransport', -) +__all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 92aa1d5256..116639c3bc 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -24,23 +24,24 @@ from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -60,10 +61,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -97,7 +100,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +127,15 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = 
None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -161,29 +166,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -257,7 +264,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -266,7 +274,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -318,9 +327,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -339,18 +348,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Awaitable[logging.WriteLogEntriesResponse]]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], Awaitable[logging.WriteLogEntriesResponse]]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -371,18 +380,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Awaitable[logging.ListLogEntriesResponse]]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], Awaitable[logging.ListLogEntriesResponse]]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -400,18 +409,18 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[[logging.ListMonitoredResourceDescriptorsRequest], Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -428,18 +437,18 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
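# Illustrative sketch, not part of the patch: the lazy stub-caching pattern that
# the surrounding transport properties all follow. Every name here (_FakeChannel,
# _SketchTransport, "example_method") is hypothetical; only the shape -- build the
# stub on first access, memoize it in self._stubs, return the cached callable --
# mirrors the generated delete_log/write_log_entries/list_logs properties.
from typing import Any, Callable, Dict


class _FakeChannel:
    """Stand-in for a grpc.Channel / aio.Channel; the real transports call unary_unary."""

    def unary_unary(self, method: str, **kwargs: Any) -> Callable[[Any], str]:
        return lambda request: f"called {method} with {request!r}"


class _SketchTransport:
    def __init__(self) -> None:
        self._stubs: Dict[str, Callable] = {}
        self._logged_channel = _FakeChannel()

    @property
    def example_method(self) -> Callable[[Any], str]:
        # First access creates the stub; every later access reuses the cached one.
        if "example_method" not in self._stubs:
            self._stubs["example_method"] = self._logged_channel.unary_unary(
                "/example.v1.ExampleService/ExampleMethod",
            )
        return self._stubs["example_method"]


_transport = _SketchTransport()
assert _transport.example_method is _transport.example_method  # cached, built once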
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Awaitable[logging.ListLogsResponse]]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], Awaitable[logging.ListLogsResponse]]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -456,18 +465,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Awaitable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse]]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -484,16 +493,16 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.delete_log: self._wrap_method( self.delete_log, @@ -624,8 +633,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -642,8 +650,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -660,8 +667,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -675,6 +681,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'LoggingServiceV2GrpcAsyncIOTransport', -) +__all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 50d894b600..41a1ef4a6a 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import MetricsServiceV2AsyncClient __all__ = ( - 'MetricsServiceV2Client', - 'MetricsServiceV2AsyncClient', + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index ad659243d7..ec468222b2 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,7 +49,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import 
MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -46,12 +57,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -174,12 +187,14 @@ def universe_domain(self) -> str: get_transport_class = MetricsServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the metrics service v2 async client. Args: @@ -234,31 +249,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - async def list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsAsyncPager: + async def list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. .. 
code-block:: python @@ -325,8 +342,7 @@ async def sample_list_log_metrics(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -344,11 +360,13 @@ async def sample_list_log_metrics(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -375,14 +393,15 @@ async def sample_list_log_metrics(): # Done; return the response. return response - async def get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. code-block:: python @@ -454,8 +473,7 @@ async def sample_get_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -473,11 +491,13 @@ async def sample_get_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -493,15 +513,16 @@ async def sample_get_log_metric(): # Done; return the response. 
return response - async def create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -589,8 +610,7 @@ async def sample_create_log_metric(): flattened_params = [parent, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -610,11 +630,13 @@ async def sample_create_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -630,15 +652,16 @@ async def sample_create_log_metric(): # Done; return the response. return response - async def update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -725,8 +748,7 @@ async def sample_update_log_metric(): flattened_params = [metric_name, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
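# Illustrative sketch, not part of the patch: the two conventions repeated in every
# method body above -- the request-object-vs-flattened-arguments guard and the
# x-goog-request-params routing header. `make_request` and its plain-dict request
# are hypothetical simplifications of the generated logging_metrics request types.
from typing import Optional, Sequence, Tuple


def make_request(
    request: Optional[dict] = None,
    *,
    parent: Optional[str] = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> Tuple[dict, tuple]:
    flattened_params = [parent]
    has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
    if request is not None and has_flattened_params:
        # Same mutual-exclusion rule the generated clients enforce.
        raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")
    request = request if request is not None else {"parent": parent}
    # Routing header: lets the backend route the RPC by resource name.
    metadata = tuple(metadata) + (("x-goog-request-params", f"parent={request.get('parent', '')}"),)
    return request, metadata


req, md = make_request(parent="projects/my-project")
assert md[-1] == ("x-goog-request-params", "parent=projects/my-project")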
@@ -746,11 +768,13 @@ async def sample_update_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -766,14 +790,15 @@ async def sample_update_log_metric(): # Done; return the response. return response - async def delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -826,8 +851,7 @@ async def sample_delete_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -845,11 +869,13 @@ async def sample_delete_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -899,17 +925,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -951,17 +978,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1006,16 +1034,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self @@ -1023,12 +1053,11 @@ async def __aenter__(self) -> "MetricsServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "MetricsServiceV2AsyncClient", -) +__all__ = ("MetricsServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 6ff3b7ac02..a07713bddf 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,7 +65,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 
# type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport @@ -66,13 +79,15 @@ class MetricsServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -108,9 +123,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -119,16 +132,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -142,21 +154,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -191,7 +201,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): MetricsServiceV2Client: The constructed client.
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -208,73 +219,116 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str,metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" - return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + return "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) @staticmethod - def parse_log_metric_path(path: str) -> Dict[str,str]: + def parse_log_metric_path( + path: str, + ) -> Dict[str, str]: """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location 
string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -306,8 +360,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = MetricsServiceV2Client._use_client_cert_effective() @@ -437,7 +493,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -477,12 +533,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the metrics service v2 client. Args: @@ -539,12 +597,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetricsServiceV2Client._read_environment_variables() self._client_cert_source = MetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = MetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. 
         self._is_universe_domain_valid = False
@@ -564,22 +622,22 @@ def __init__(self, *,
         if transport_provided:
             # transport is a MetricsServiceV2Transport instance.
             if credentials or self._client_options.credentials_file or api_key_value:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
+                raise ValueError(
+                    "When providing a transport instance, provide its credentials directly.",
+                )
             if self._client_options.scopes:
                 raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
+                    "When providing a transport instance, provide its scopes directly.",
                 )
             self._transport = cast(MetricsServiceV2Transport, transport)
             self._api_endpoint = self._transport.host

-        self._api_endpoint = (self._api_endpoint or
-                              MetricsServiceV2Client._get_api_endpoint(
-                                  self._client_options.api_endpoint,
-                                  self._client_cert_source,
-                                  self._universe_domain,
-                                  self._use_mtls_endpoint))
+        self._api_endpoint = self._api_endpoint or MetricsServiceV2Client._get_api_endpoint(
+            self._client_options.api_endpoint,
+            self._client_cert_source,
+            self._universe_domain,
+            self._use_mtls_endpoint,
+        )

         if not transport_provided:
             import google.auth._default  # type: ignore
@@ -609,25 +667,28 @@ def __init__(self, *,
         if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
             _LOGGER.debug(
                 "Created client `google.logging_v2.MetricsServiceV2Client`.",
-                extra = {
+                extra={
                     "serviceName": "google.logging.v2.MetricsServiceV2",
                     "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
                     "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
                     "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._transport, "_credentials") else {
+                }
+                if hasattr(self._transport, "_credentials")
+                else {
                     "serviceName": "google.logging.v2.MetricsServiceV2",
                     "credentialsType": None,
-                }
+                },
             )

-    def list_log_metrics(self,
-            request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListLogMetricsPager:
+    def list_log_metrics(
+        self,
+        request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListLogMetricsPager:
         r"""Lists logs-based metrics.

         .. code-block:: python
@@ -694,8 +755,7 @@ def sample_list_log_metrics():
         flattened_params = [parent]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -712,11 +772,13 @@ def sample_list_log_metrics():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("parent", request.parent),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -743,14 +805,15 @@ def sample_list_log_metrics():
         # Done; return the response.
         return response

-    def get_log_metric(self,
-            request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None,
-            *,
-            metric_name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> logging_metrics.LogMetric:
+    def get_log_metric(
+        self,
+        request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None,
+        *,
+        metric_name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> logging_metrics.LogMetric:
         r"""Gets a logs-based metric.

         .. code-block:: python
@@ -822,8 +885,7 @@ def sample_get_log_metric():
         flattened_params = [metric_name]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -840,11 +902,13 @@ def sample_get_log_metric():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("metric_name", request.metric_name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -860,15 +924,16 @@ def sample_get_log_metric():
         # Done; return the response.
         return response

-    def create_log_metric(self,
-            request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            metric: Optional[logging_metrics.LogMetric] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> logging_metrics.LogMetric:
+    def create_log_metric(
+        self,
+        request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        metric: Optional[logging_metrics.LogMetric] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> logging_metrics.LogMetric:
         r"""Creates a logs-based metric.

         .. code-block:: python
@@ -956,8 +1021,7 @@ def sample_create_log_metric():
         flattened_params = [parent, metric]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -976,11 +1040,13 @@ def sample_create_log_metric():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("parent", request.parent),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -996,15 +1062,16 @@ def sample_create_log_metric():
         # Done; return the response.
         return response

-    def update_log_metric(self,
-            request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None,
-            *,
-            metric_name: Optional[str] = None,
-            metric: Optional[logging_metrics.LogMetric] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> logging_metrics.LogMetric:
+    def update_log_metric(
+        self,
+        request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None,
+        *,
+        metric_name: Optional[str] = None,
+        metric: Optional[logging_metrics.LogMetric] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> logging_metrics.LogMetric:
         r"""Creates or updates a logs-based metric.

         .. code-block:: python
@@ -1091,8 +1158,7 @@ def sample_update_log_metric():
         flattened_params = [metric_name, metric]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -1111,11 +1177,13 @@ def sample_update_log_metric():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("metric_name", request.metric_name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -1131,14 +1199,15 @@ def sample_update_log_metric():
         # Done; return the response.
         return response

-    def delete_log_metric(self,
-            request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None,
-            *,
-            metric_name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> None:
+    def delete_log_metric(
+        self,
+        request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None,
+        *,
+        metric_name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> None:
         r"""Deletes a logs-based metric.
         .. code-block:: python
@@ -1191,8 +1260,7 @@ def sample_delete_log_metric():
         flattened_params = [metric_name]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -1209,11 +1277,13 @@ def sample_delete_log_metric():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("metric_name", request.metric_name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -1276,10 +1346,7 @@ def list_operations(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -1287,7 +1354,11 @@ def list_operations(
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )

             # Done; return the response.
             return response
@@ -1332,10 +1403,7 @@ def get_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._validate_universe_domain()
@@ -1343,7 +1411,11 @@ def get_operation(
         try:
             # Send the request.
             response = rpc(
-                request, retry=retry, timeout=timeout, metadata=metadata,)
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )

             # Done; return the response.
             return response
@@ -1391,21 +1463,18 @@ def cancel_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._validate_universe_domain()

         # Send the request.
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-
-
-
-
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
@@ -1413,6 +1482,4 @@ def cancel_operation(
 if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
     DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__

-__all__ = (
-    "MetricsServiceV2Client",
-)
+__all__ = ("MetricsServiceV2Client",)
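The `# fmt: off` / `# fmt: on` pairs added around the routing-header tuples above are Black's standard escape hatch: the formatter leaves everything between the two comments exactly as written, so the hand-laid nesting survives future reformatting runs. A small self-contained illustration (the snippet itself is arbitrary):

.. code-block:: python

    # fmt: off
    identity = [
        1, 0,
        0, 1,
    ]
    # fmt: on

    # Black would normally collapse the list onto one line; the guards
    # preserve the deliberate 2x2 layout.
    print(identity)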
""" - def __init__(self, - method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: @@ -163,4 +171,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index a6eb39e80f..4975feb994 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. 
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
index a6eb39e80f..4975feb994 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
@@ -23,11 +23,11 @@
 # Compile a registry of transports.
 _transport_registry = OrderedDict()  # type: Dict[str, Type[MetricsServiceV2Transport]]
-_transport_registry['grpc'] = MetricsServiceV2GrpcTransport
-_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport
+_transport_registry["grpc"] = MetricsServiceV2GrpcTransport
+_transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport

 __all__ = (
-    'MetricsServiceV2Transport',
-    'MetricsServiceV2GrpcTransport',
-    'MetricsServiceV2GrpcAsyncIOTransport',
+    "MetricsServiceV2Transport",
+    "MetricsServiceV2GrpcTransport",
+    "MetricsServiceV2GrpcAsyncIOTransport",
 )
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
index fef46ac38e..b8b167f710 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
@@ -24,11 +24,11 @@
 from google.api_core import gapic_v1
 from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account # type: ignore
+from google.oauth2 import service_account  # type: ignore
 import google.protobuf

 from google.cloud.logging_v2.types import logging_metrics
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore

 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
@@ -40,33 +40,36 @@
 class MetricsServiceV2Transport(abc.ABC):
     """Abstract transport class for MetricsServiceV2."""

+    # fmt: off
     AUTH_SCOPES = (
-        'https://www.googleapis.com/auth/cloud-platform',
-        'https://www.googleapis.com/auth/cloud-platform.read-only',
-        'https://www.googleapis.com/auth/logging.admin',
-        'https://www.googleapis.com/auth/logging.read',
-        'https://www.googleapis.com/auth/logging.write',
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/logging.read",
+        "https://www.googleapis.com/auth/logging.write",
     )
+    # fmt: on

-    DEFAULT_HOST: str = 'logging.googleapis.com'
+    DEFAULT_HOST: str = "logging.googleapis.com"

     def __init__(
-            self, *,
-            host: str = DEFAULT_HOST,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            **kwargs,
-            ) -> None:
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
         """Instantiate the transport.

         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]): The
                 authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -102,10 +105,10 @@ def __init__(

         if credentials_file is not None:
             credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id,
+            )
         elif credentials is None and not self._ignore_credentials:
             credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
             # Don't apply audience if the credentials file passed from user.
@@ -113,15 +116,19 @@ def __init__(
                 credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

         # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
             credentials = credentials.with_always_use_jwt_access(True)

         # Save the credentials.
         self._credentials = credentials

         # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
+        if ":" not in host:
+            host += ":443"
         self._host = host

     @property
@@ -215,61 +222,86 @@ def _prep_wrapped_messages(self, client_info):
                 default_timeout=None,
                 client_info=client_info,
             ),
-         }
+        }

     def close(self):
         """Closes resources associated with the transport.

-       .. warning::
+        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
""" raise NotImplementedError() + # fmt: off @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Union[ - logging_metrics.ListLogMetricsResponse, - Awaitable[logging_metrics.ListLogMetricsResponse] - ]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Union[ + logging_metrics.ListLogMetricsResponse, + Awaitable[logging_metrics.ListLogMetricsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log_metric( + self, + ) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -283,19 +315,13 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property @@ -303,6 +329,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MetricsServiceV2Transport', -) +__all__ = ("MetricsServiceV2Transport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d2c41d01e8..fb2a98c878 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -21,7 +21,7 @@ from google.api_core import grpc_helpers from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as 
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
index d2c41d01e8..fb2a98c878 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
@@ -21,7 +21,7 @@

 from google.api_core import grpc_helpers
 from google.api_core import gapic_v1
-import google.auth # type: ignore
+import google.auth  # type: ignore
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.protobuf.json_format import MessageToJson
@@ -31,12 +31,13 @@
 import proto  # type: ignore

 from google.cloud.logging_v2.types import logging_metrics
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO

 try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -56,10 +57,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         else:
             request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"

+        # fmt: off
         request_metadata = {
             key: value.decode("utf-8") if isinstance(value, bytes) else value
             for key, value in request_metadata
         }
+        # fmt: on
         grpc_request = {
             "payload": request_payload,
             "requestMethod": "grpc",
@@ -67,7 +70,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         }
         _LOGGER.debug(
             f"Sending request for {client_call_details.method}",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.MetricsServiceV2",
                 "rpcName": str(client_call_details.method),
                 "request": grpc_request,
@@ -93,7 +96,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
         }
         _LOGGER.debug(
             f"Received response for {client_call_details.method}.",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.MetricsServiceV2",
                 "rpcName": client_call_details.method,
                 "response": grpc_response,
@@ -115,28 +118,31 @@ class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
     It sends protocol buffers over the wire using gRPC (which is built on
     top of HTTP/2); the ``grpcio`` package must be installed.
     """
+
     _stubs: Dict[str, Callable]

-    def __init__(self, *,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.
         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -210,7 +216,8 @@ def __init__(self, *,
                 if client_cert_source:
                     cert, key = client_cert_source()
                     self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
+                        certificate_chain=cert,
+                        private_key=key,
                     )
                 else:
                     self._ssl_channel_credentials = SslCredentials().ssl_credentials
@@ -219,7 +226,8 @@ def __init__(self, *,
             if client_cert_source_for_mtls and not ssl_channel_credentials:
                 cert, key = client_cert_source_for_mtls()
                 self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
+                    certificate_chain=cert,
+                    private_key=key,
                 )

         # The base transport sets the host, credentials and scopes
@@ -254,19 +262,21 @@ def __init__(self, *,
         )

         self._interceptor = _LoggingClientInterceptor()
-        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)
+        self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor)

         # Wrap messages. This must be done after self._logged_channel exists
         self._prep_wrapped_messages(client_info)

     @classmethod
-    def create_channel(cls,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> grpc.Channel:
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
         """Create and return a gRPC channel object.
         Args:
             host (Optional[str]): The host for the channel to use.
@@ -302,19 +312,18 @@ def create_channel(cls,
             default_scopes=cls.AUTH_SCOPES,
             scopes=scopes,
             default_host=cls.DEFAULT_HOST,
-            **kwargs
+            **kwargs,
         )

     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
+        """Return the channel designed to connect to this service."""
         return self._grpc_channel

     @property
-    def list_log_metrics(self) -> Callable[
-            [logging_metrics.ListLogMetricsRequest],
-            logging_metrics.ListLogMetricsResponse]:
+    def list_log_metrics(
+        self,
+    ) -> Callable[[logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse]:
         r"""Return a callable for the list log metrics method over gRPC.

         Lists logs-based metrics.
@@ -329,18 +338,18 @@ def list_log_metrics(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_log_metrics' not in self._stubs:
-            self._stubs['list_log_metrics'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/ListLogMetrics',
+        if "list_log_metrics" not in self._stubs:
+            self._stubs["list_log_metrics"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/ListLogMetrics",
                 request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
                 response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
             )
-        return self._stubs['list_log_metrics']
+        return self._stubs["list_log_metrics"]

     @property
-    def get_log_metric(self) -> Callable[
-            [logging_metrics.GetLogMetricRequest],
-            logging_metrics.LogMetric]:
+    def get_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]:
         r"""Return a callable for the get log metric method over gRPC.

         Gets a logs-based metric.
@@ -355,18 +364,18 @@ def get_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_log_metric' not in self._stubs:
-            self._stubs['get_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/GetLogMetric',
+        if "get_log_metric" not in self._stubs:
+            self._stubs["get_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/GetLogMetric",
                 request_serializer=logging_metrics.GetLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['get_log_metric']
+        return self._stubs["get_log_metric"]

     @property
-    def create_log_metric(self) -> Callable[
-            [logging_metrics.CreateLogMetricRequest],
-            logging_metrics.LogMetric]:
+    def create_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]:
         r"""Return a callable for the create log metric method over gRPC.

         Creates a logs-based metric.
@@ -381,18 +390,18 @@ def create_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_log_metric' not in self._stubs:
-            self._stubs['create_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/CreateLogMetric',
+        if "create_log_metric" not in self._stubs:
+            self._stubs["create_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/CreateLogMetric",
                 request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['create_log_metric']
+        return self._stubs["create_log_metric"]

     @property
-    def update_log_metric(self) -> Callable[
-            [logging_metrics.UpdateLogMetricRequest],
-            logging_metrics.LogMetric]:
+    def update_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]:
         r"""Return a callable for the update log metric method over gRPC.

         Creates or updates a logs-based metric.
@@ -407,18 +416,18 @@ def update_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_log_metric' not in self._stubs:
-            self._stubs['update_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/UpdateLogMetric',
+        if "update_log_metric" not in self._stubs:
+            self._stubs["update_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/UpdateLogMetric",
                 request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['update_log_metric']
+        return self._stubs["update_log_metric"]

     @property
-    def delete_log_metric(self) -> Callable[
-            [logging_metrics.DeleteLogMetricRequest],
-            empty_pb2.Empty]:
+    def delete_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]:
         r"""Return a callable for the delete log metric method over gRPC.

         Deletes a logs-based metric.
@@ -433,13 +442,13 @@ def delete_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_log_metric' not in self._stubs:
-            self._stubs['delete_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/DeleteLogMetric',
+        if "delete_log_metric" not in self._stubs:
+            self._stubs["delete_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/DeleteLogMetric",
                 request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_log_metric']
+        return self._stubs["delete_log_metric"]

     def close(self):
         self._logged_channel.close()
@@ -448,8 +457,7 @@ def close(self):
     def cancel_operation(
         self,
     ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
+        r"""Return a callable for the cancel_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -466,8 +474,7 @@ def cancel_operation(
     def get_operation(
         self,
     ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
+        r"""Return a callable for the get_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -484,8 +491,7 @@ def get_operation(
     def list_operations(
         self,
     ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
+        r"""Return a callable for the list_operations method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -503,6 +509,4 @@ def kind(self) -> str:
         return "grpc"


-__all__ = (
-    'MetricsServiceV2GrpcTransport',
-)
+__all__ = ("MetricsServiceV2GrpcTransport",)
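The `# fmt: off` block in both interceptors normalizes gRPC metadata before logging it: metadata values may arrive as `bytes`, which JSON-oriented logging handlers cannot serialize. A standalone sketch of that step, with fabricated sample metadata:

.. code-block:: python

    raw_metadata = (
        ("x-goog-request-params", b"name=projects%2Fmy-project"),
        ("user-agent", "gapic/1.0"),
    )

    # Same comprehension the interceptors use: decode bytes values,
    # pass strings through unchanged.
    request_metadata = {
        key: value.decode("utf-8") if isinstance(value, bytes) else value
        for key, value in raw_metadata
    }
    print(request_metadata)  # every value is now a str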
diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
index 15b1ab3ad8..66c283fc46 100755
--- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
+++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
@@ -24,23 +24,24 @@
 from google.api_core import grpc_helpers_async
 from google.api_core import exceptions as core_exceptions
 from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.protobuf.json_format import MessageToJson
 import google.protobuf.message
-import grpc # type: ignore
-import proto # type: ignore
+import grpc  # type: ignore
+import proto  # type: ignore
 from grpc.experimental import aio  # type: ignore

 from google.cloud.logging_v2.types import logging_metrics
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
 from .grpc import MetricsServiceV2GrpcTransport

 try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -60,10 +61,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         else:
             request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"

+        # fmt: off
         request_metadata = {
             key: value.decode("utf-8") if isinstance(value, bytes) else value
             for key, value in request_metadata
         }
+        # fmt: on
         grpc_request = {
             "payload": request_payload,
             "requestMethod": "grpc",
@@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         }
         _LOGGER.debug(
             f"Sending request for {client_call_details.method}",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.MetricsServiceV2",
                 "rpcName": str(client_call_details.method),
                 "request": grpc_request,
@@ -97,7 +100,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         }
         _LOGGER.debug(
             f"Received response to rpc {client_call_details.method}.",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.MetricsServiceV2",
                 "rpcName": str(client_call_details.method),
                 "response": grpc_response,
@@ -124,13 +127,15 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
     _stubs: Dict[str, Callable] = {}

     @classmethod
-    def create_channel(cls,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            **kwargs) -> aio.Channel:
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
         """Create and return a gRPC AsyncIO channel object.
         Args:
             host (Optional[str]): The host for the channel to use.
@@ -161,29 +166,31 @@ def create_channel(cls,
             default_scopes=cls.AUTH_SCOPES,
             scopes=scopes,
             default_host=cls.DEFAULT_HOST,
-            **kwargs
+            **kwargs,
         )

-    def __init__(self, *,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.

         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -257,7 +264,8 @@ def __init__(self, *,
                 if client_cert_source:
                     cert, key = client_cert_source()
                     self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
+                        certificate_chain=cert,
+                        private_key=key,
                     )
                 else:
                     self._ssl_channel_credentials = SslCredentials().ssl_credentials
@@ -266,7 +274,8 @@ def __init__(self, *,
             if client_cert_source_for_mtls and not ssl_channel_credentials:
                 cert, key = client_cert_source_for_mtls()
                 self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
+                    certificate_chain=cert,
+                    private_key=key,
                 )

         # The base transport sets the host, credentials and scopes
@@ -318,9 +327,9 @@ def grpc_channel(self) -> aio.Channel:
         return self._grpc_channel

     @property
-    def list_log_metrics(self) -> Callable[
-            [logging_metrics.ListLogMetricsRequest],
-            Awaitable[logging_metrics.ListLogMetricsResponse]]:
+    def list_log_metrics(
+        self,
+    ) -> Callable[[logging_metrics.ListLogMetricsRequest], Awaitable[logging_metrics.ListLogMetricsResponse]]:
         r"""Return a callable for the list log metrics method over gRPC.

         Lists logs-based metrics.
@@ -335,18 +344,18 @@ def list_log_metrics(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_log_metrics' not in self._stubs:
-            self._stubs['list_log_metrics'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/ListLogMetrics',
+        if "list_log_metrics" not in self._stubs:
+            self._stubs["list_log_metrics"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/ListLogMetrics",
                 request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
                 response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
             )
-        return self._stubs['list_log_metrics']
+        return self._stubs["list_log_metrics"]

     @property
-    def get_log_metric(self) -> Callable[
-            [logging_metrics.GetLogMetricRequest],
-            Awaitable[logging_metrics.LogMetric]]:
+    def get_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric]]:
         r"""Return a callable for the get log metric method over gRPC.

         Gets a logs-based metric.
@@ -361,18 +370,18 @@ def get_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_log_metric' not in self._stubs:
-            self._stubs['get_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/GetLogMetric',
+        if "get_log_metric" not in self._stubs:
+            self._stubs["get_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/GetLogMetric",
                 request_serializer=logging_metrics.GetLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['get_log_metric']
+        return self._stubs["get_log_metric"]

     @property
-    def create_log_metric(self) -> Callable[
-            [logging_metrics.CreateLogMetricRequest],
-            Awaitable[logging_metrics.LogMetric]]:
+    def create_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]:
         r"""Return a callable for the create log metric method over gRPC.

         Creates a logs-based metric.
@@ -387,18 +396,18 @@ def create_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_log_metric' not in self._stubs:
-            self._stubs['create_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/CreateLogMetric',
+        if "create_log_metric" not in self._stubs:
+            self._stubs["create_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/CreateLogMetric",
                 request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['create_log_metric']
+        return self._stubs["create_log_metric"]

     @property
-    def update_log_metric(self) -> Callable[
-            [logging_metrics.UpdateLogMetricRequest],
-            Awaitable[logging_metrics.LogMetric]]:
+    def update_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]:
         r"""Return a callable for the update log metric method over gRPC.

         Creates or updates a logs-based metric.
@@ -413,18 +422,18 @@ def update_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_log_metric' not in self._stubs:
-            self._stubs['update_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/UpdateLogMetric',
+        if "update_log_metric" not in self._stubs:
+            self._stubs["update_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/UpdateLogMetric",
                 request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
                 response_deserializer=logging_metrics.LogMetric.deserialize,
             )
-        return self._stubs['update_log_metric']
+        return self._stubs["update_log_metric"]

     @property
-    def delete_log_metric(self) -> Callable[
-            [logging_metrics.DeleteLogMetricRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def delete_log_metric(
+        self,
+    ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete log metric method over gRPC.

         Deletes a logs-based metric.
@@ -439,16 +448,16 @@ def delete_log_metric(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_log_metric' not in self._stubs:
-            self._stubs['delete_log_metric'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.MetricsServiceV2/DeleteLogMetric',
+        if "delete_log_metric" not in self._stubs:
+            self._stubs["delete_log_metric"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.MetricsServiceV2/DeleteLogMetric",
                 request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_log_metric']
+        return self._stubs["delete_log_metric"]

     def _prep_wrapped_messages(self, client_info):
-        """ Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+        """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
         self._wrapped_methods = {
             self.list_log_metrics: self._wrap_method(
                 self.list_log_metrics,
@@ -552,8 +561,7 @@ def kind(self) -> str:
     def cancel_operation(
         self,
     ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
+        r"""Return a callable for the cancel_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -570,8 +578,7 @@ def cancel_operation(
     def get_operation(
         self,
     ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
+        r"""Return a callable for the get_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -588,8 +595,7 @@ def get_operation(
     def list_operations(
         self,
     ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
+        r"""Return a callable for the list_operations method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
# gRPC handles serialization and deserialization, so we just need @@ -603,6 +609,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'MetricsServiceV2GrpcAsyncIOTransport', -) +__all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 3023b14aa8..efea793076 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -99,80 +99,80 @@ ) __all__ = ( - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + 
"ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 695393863f..8ff2da4023 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -27,15 +27,17 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", }, ) +# fmt: on class LogEntry(proto.Message): @@ -249,18 +251,18 @@ class LogEntry(proto.Message): proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, - oneof='payload', + oneof="payload", message=any_pb2.Any, ) text_payload: str = proto.Field( proto.STRING, number=3, - oneof='payload', + oneof="payload", ) json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, - oneof='payload', + oneof="payload", message=struct_pb2.Struct, ) timestamp: timestamp_pb2.Timestamp = proto.Field( @@ -292,10 +294,10 @@ class LogEntry(proto.Message): proto.STRING, number=11, ) - operation: 'LogEntryOperation' = proto.Field( + operation: "LogEntryOperation" = proto.Field( proto.MESSAGE, number=15, - message='LogEntryOperation', + message="LogEntryOperation", ) trace: str = proto.Field( proto.STRING, @@ -309,15 +311,15 @@ class LogEntry(proto.Message): proto.BOOL, number=30, ) - source_location: 'LogEntrySourceLocation' = proto.Field( + source_location: "LogEntrySourceLocation" = proto.Field( proto.MESSAGE, number=23, - message='LogEntrySourceLocation', + message="LogEntrySourceLocation", ) - split: 'LogSplit' = proto.Field( + split: "LogSplit" = proto.Field( proto.MESSAGE, number=35, - message='LogSplit', + message="LogSplit", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index 5b1dd80ceb..0c2d9735f9 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -25,23 +25,25 @@ from google.rpc import status_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'DeleteLogRequest', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 
'TailLogEntriesRequest', - 'TailLogEntriesResponse', + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) +# fmt: on class DeleteLogRequest(proto.Message): @@ -191,8 +193,7 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. - """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -556,6 +557,7 @@ class SuppressionInfo(proto.Message): A lower bound on the count of entries omitted due to ``reason``. """ + class Reason(proto.Enum): r"""An indicator of why entries were omitted. @@ -575,10 +577,10 @@ class Reason(proto.Enum): RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( + reason: "TailLogEntriesResponse.SuppressionInfo.Reason" = proto.Field( proto.ENUM, number=1, - enum='TailLogEntriesResponse.SuppressionInfo.Reason', + enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) suppressed_count: int = proto.Field( proto.INT32, diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 292f690774..38e6295c6f 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -23,64 +23,66 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'OperationState', - 'LifecycleState', - 'IndexType', - 'IndexConfig', - 'LogBucket', - 'LogView', - 'LogSink', - 'BigQueryDataset', - 'Link', - 'BigQueryOptions', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'CreateBucketRequest', - 'UpdateBucketRequest', - 'GetBucketRequest', - 'DeleteBucketRequest', - 'UndeleteBucketRequest', - 'ListViewsRequest', - 'ListViewsResponse', - 'CreateViewRequest', - 'UpdateViewRequest', - 'GetViewRequest', - 'DeleteViewRequest', - 'ListSinksRequest', - 'ListSinksResponse', - 'GetSinkRequest', - 'CreateSinkRequest', - 'UpdateSinkRequest', - 'DeleteSinkRequest', - 'CreateLinkRequest', - 'DeleteLinkRequest', - 'ListLinksRequest', - 'ListLinksResponse', - 'GetLinkRequest', - 'LogExclusion', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'GetExclusionRequest', - 'CreateExclusionRequest', - 'UpdateExclusionRequest', - 'DeleteExclusionRequest', - 'GetCmekSettingsRequest', - 'UpdateCmekSettingsRequest', - 'CmekSettings', - 'GetSettingsRequest', - 'UpdateSettingsRequest', - 'Settings', - 'CopyLogEntriesRequest', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesResponse', - 'BucketMetadata', - 'LinkMetadata', - 'LocationMetadata', + "OperationState", + "LifecycleState", + "IndexType", + "IndexConfig", + "LogBucket", + "LogView", + "LogSink", + "BigQueryDataset", + "Link", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "CreateBucketRequest", + "UpdateBucketRequest", + "GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + 
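The `oneof="payload"` fields in the log_entry.py hunk further up show how proto-plus expresses a protobuf oneof: member fields share a group name, and assigning one member clears its siblings. A small sketch under the same assumption that proto-plus is available (names are invented for illustration):

    import proto


    class DemoEntry(proto.Message):
        # Both fields belong to the "payload" oneof; setting one clears the other.
        text_payload: str = proto.Field(proto.STRING, number=1, oneof="payload")
        ref_payload: str = proto.Field(proto.STRING, number=2, oneof="payload")


    entry = DemoEntry(text_payload="hello")
    entry.ref_payload = "ref"  # assigning the sibling clears text_payload
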
"DeleteViewRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + "CreateLinkRequest", + "DeleteLinkRequest", + "ListLinksRequest", + "ListLinksResponse", + "GetLinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", + "BucketMetadata", + "LinkMetadata", + "LocationMetadata", }, ) +# fmt: on class OperationState(proto.Enum): @@ -191,10 +193,10 @@ class IndexConfig(proto.Message): proto.STRING, number=1, ) - type_: 'IndexType' = proto.Field( + type_: "IndexType" = proto.Field( proto.ENUM, number=2, - enum='IndexType', + enum="IndexType", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -300,10 +302,10 @@ class LogBucket(proto.Message): proto.BOOL, number=9, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=12, - enum='LifecycleState', + enum="LifecycleState", ) analytics_enabled: bool = proto.Field( proto.BOOL, @@ -313,15 +315,15 @@ class LogBucket(proto.Message): proto.STRING, number=15, ) - index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( proto.MESSAGE, number=17, - message='IndexConfig', + message="IndexConfig", ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, - message='CmekSettings', + message="CmekSettings", ) @@ -500,6 +502,7 @@ class LogSink(proto.Message): sink. This field may not be present for older sinks. """ + class VersionFormat(proto.Enum): r"""Deprecated. This is unused. 
@@ -536,10 +539,10 @@ class VersionFormat(proto.Enum): proto.BOOL, number=19, ) - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='LogExclusion', + message="LogExclusion", ) output_version_format: VersionFormat = proto.Field( proto.ENUM, @@ -554,11 +557,11 @@ class VersionFormat(proto.Enum): proto.BOOL, number=9, ) - bigquery_options: 'BigQueryOptions' = proto.Field( + bigquery_options: "BigQueryOptions" = proto.Field( proto.MESSAGE, number=12, - oneof='options', - message='BigQueryOptions', + oneof="options", + message="BigQueryOptions", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -644,15 +647,15 @@ class Link(proto.Message): number=3, message=timestamp_pb2.Timestamp, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=4, - enum='LifecycleState', + enum="LifecycleState", ) - bigquery_dataset: 'BigQueryDataset' = proto.Field( + bigquery_dataset: "BigQueryDataset" = proto.Field( proto.MESSAGE, number=5, - message='BigQueryDataset', + message="BigQueryDataset", ) @@ -755,10 +758,10 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets: MutableSequence['LogBucket'] = proto.RepeatedField( + buckets: MutableSequence["LogBucket"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogBucket', + message="LogBucket", ) next_page_token: str = proto.Field( proto.STRING, @@ -800,10 +803,10 @@ class CreateBucketRequest(proto.Message): proto.STRING, number=2, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=3, - message='LogBucket', + message="LogBucket", ) @@ -842,10 +845,10 @@ class UpdateBucketRequest(proto.Message): proto.STRING, number=1, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=2, - message='LogBucket', + message="LogBucket", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -985,10 +988,10 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views: MutableSequence['LogView'] = proto.RepeatedField( + views: MutableSequence["LogView"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogView', + message="LogView", ) next_page_token: str = proto.Field( proto.STRING, @@ -1027,10 +1030,10 @@ class CreateViewRequest(proto.Message): proto.STRING, number=2, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=3, - message='LogView', + message="LogView", ) @@ -1066,10 +1069,10 @@ class UpdateViewRequest(proto.Message): proto.STRING, number=1, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=2, - message='LogView', + message="LogView", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1181,10 +1184,10 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks: MutableSequence['LogSink'] = proto.RepeatedField( + sinks: MutableSequence["LogSink"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogSink', + message="LogSink", ) next_page_token: str = proto.Field( proto.STRING, @@ -1259,10 +1262,10 @@ class CreateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ 
-1331,10 +1334,10 @@ class UpdateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1399,10 +1402,10 @@ class CreateLinkRequest(proto.Message): proto.STRING, number=1, ) - link: 'Link' = proto.Field( + link: "Link" = proto.Field( proto.MESSAGE, number=2, - message='Link', + message="Link", ) link_id: str = proto.Field( proto.STRING, @@ -1481,10 +1484,10 @@ class ListLinksResponse(proto.Message): def raw_page(self): return self - links: MutableSequence['Link'] = proto.RepeatedField( + links: MutableSequence["Link"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Link', + message="Link", ) next_page_token: str = proto.Field( proto.STRING, @@ -1643,10 +1646,10 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogExclusion', + message="LogExclusion", ) next_page_token: str = proto.Field( proto.STRING, @@ -1708,10 +1711,10 @@ class CreateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) @@ -1752,10 +1755,10 @@ class UpdateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1874,10 +1877,10 @@ class UpdateCmekSettingsRequest(proto.Message): proto.STRING, number=1, ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=2, - message='CmekSettings', + message="CmekSettings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2073,10 +2076,10 @@ class UpdateSettingsRequest(proto.Message): proto.STRING, number=1, ) - settings: 'Settings' = proto.Field( + settings: "Settings" = proto.Field( proto.MESSAGE, number=2, - message='Settings', + message="Settings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2249,19 +2252,19 @@ class CopyLogEntriesMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) cancellation_requested: bool = proto.Field( proto.BOOL, number=4, ) - request: 'CopyLogEntriesRequest' = proto.Field( + request: "CopyLogEntriesRequest" = proto.Field( proto.MESSAGE, number=5, - message='CopyLogEntriesRequest', + message="CopyLogEntriesRequest", ) progress: int = proto.Field( proto.INT32, @@ -2324,22 +2327,22 @@ class BucketMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_bucket_request: 'CreateBucketRequest' = proto.Field( + create_bucket_request: "CreateBucketRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateBucketRequest', + oneof="request", + message="CreateBucketRequest", ) - 
update_bucket_request: 'UpdateBucketRequest' = proto.Field( + update_bucket_request: "UpdateBucketRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='UpdateBucketRequest', + oneof="request", + message="UpdateBucketRequest", ) @@ -2380,22 +2383,22 @@ class LinkMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_link_request: 'CreateLinkRequest' = proto.Field( + create_link_request: "CreateLinkRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateLinkRequest', + oneof="request", + message="CreateLinkRequest", ) - delete_link_request: 'DeleteLinkRequest' = proto.Field( + delete_link_request: "DeleteLinkRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='DeleteLinkRequest', + oneof="request", + message="DeleteLinkRequest", ) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index d26267d8cf..97874d0eb6 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -24,18 +24,20 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogMetric', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'GetLogMetricRequest', - 'CreateLogMetricRequest', - 'UpdateLogMetricRequest', - 'DeleteLogMetricRequest', + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", }, ) +# fmt: on class LogMetric(proto.Message): @@ -180,6 +182,7 @@ class LogMetric(proto.Message): updated this metric. The v2 format is used by default and cannot be changed. """ + class ApiVersion(proto.Enum): r"""Logging API version. 
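The logging_config.py and logging_metrics.py hunks above flip quote style on proto-plus forward references such as `message="LogBucket"`: the target type is named as a string so a field can refer to a message defined later in the same module. A compact sketch of that mechanism, assuming proto-plus resolves string references lazily within a module (both types here are invented):

    import proto


    class Outer(proto.Message):
        # "Inner" is resolved by name, even though the class
        # is only defined below this field.
        inner: "Inner" = proto.Field(proto.MESSAGE, number=1, message="Inner")


    class Inner(proto.Message):
        value: str = proto.Field(proto.STRING, number=1)
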
@@ -302,10 +305,10 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics: MutableSequence['LogMetric'] = proto.RepeatedField( + metrics: MutableSequence["LogMetric"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogMetric', + message="LogMetric", ) next_page_token: str = proto.Field( proto.STRING, @@ -353,10 +356,10 @@ class CreateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) @@ -383,10 +386,10 @@ class UpdateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) diff --git a/tests/integration/goldens/logging/noxfile.py b/tests/integration/goldens/logging/noxfile.py index cafd945c0f..70216a7e57 100755 --- a/tests/integration/goldens/logging/noxfile.py +++ b/tests/integration/goldens/logging/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 646ec1476b..a5a5485915 100755 --- a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -44,7 +44,7 @@ async def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index aab2284789..04ca57f4fd 100755 --- a/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -44,7 +44,7 @@ def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. 
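The noxfile hunk above narrows `FORMAT_PATHS` from `["google", "tests"]` to `["tests"]`, so the format session stops running the formatters over the generated google/ directory, which matches the patch subject. A sketch of how a nox session typically consumes these constants; the session body below is an assumption for illustration, not the golden noxfile's actual code:

    import nox

    BLACK_VERSION = "black[jupyter]==23.7.0"
    ISORT_VERSION = "isort==5.11.0"
    FORMAT_PATHS = ["tests"]  # "google" removed: generated code stays as emitted


    @nox.session
    def format(session: nox.Session) -> None:
        # Hypothetical session body, shown only to illustrate how
        # FORMAT_PATHS scopes the formatters.
        session.install(BLACK_VERSION, ISORT_VERSION)
        session.run("isort", *FORMAT_PATHS)
        session.run("black", *FORMAT_PATHS)
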
requests = [request] diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 9df459055f..ce9d7029dd 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -964,7 +964,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_buckets(request) @@ -976,7 +976,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_buckets_non_empty_request_with_auto_populated_field(): @@ -991,8 +991,8 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1004,8 +1004,8 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_buckets_use_cached_wrapped_rpc(): @@ -1091,7 +1091,7 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_buckets(request) @@ -1103,7 +1103,7 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1119,7 +1119,7 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1151,7 +1151,7 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1187,7 @@ def test_list_buckets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1195,7 +1195,7 @@ def test_list_buckets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1209,7 +1209,7 @@ def test_list_buckets_flattened_error(): with pytest.raises(ValueError): client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1229,7 +1229,7 @@ async def test_list_buckets_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1237,7 +1237,7 @@ async def test_list_buckets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1251,7 +1251,7 @@ async def test_list_buckets_flattened_error_async(): with pytest.raises(ValueError): await client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1471,13 +1471,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1489,13 +1489,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_non_empty_request_with_auto_populated_field(): @@ -1510,7 +1510,7 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1522,7 +1522,7 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) def test_get_bucket_use_cached_wrapped_rpc(): @@ -1608,13 +1608,13 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.get_bucket(request) @@ -1626,13 +1626,13 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1648,7 +1648,7 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1680,7 +1680,7 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1746,8 +1746,8 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1759,8 +1759,8 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) def test_create_bucket_async_use_cached_wrapped_rpc(): @@ -1883,7 +1883,7 @@ def test_create_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1915,7 +1915,7 @@ async def test_create_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1981,7 +1981,7 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1993,7 +1993,7 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) def test_update_bucket_async_use_cached_wrapped_rpc(): @@ -2116,7 +2116,7 @@ def test_update_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2148,7 +2148,7 @@ async def test_update_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2190,13 +2190,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -2208,13 +2208,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_non_empty_request_with_auto_populated_field(): @@ -2229,8 +2229,8 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2242,8 +2242,8 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) def test_create_bucket_use_cached_wrapped_rpc(): @@ -2329,13 +2329,13 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.create_bucket(request) @@ -2347,13 +2347,13 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -2369,7 +2369,7 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2401,7 +2401,7 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2443,13 +2443,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -2461,13 +2461,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_non_empty_request_with_auto_populated_field(): @@ -2482,7 +2482,7 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
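The async test hunks above keep wrapping fake responses in `grpc_helpers_async.FakeUnaryUnaryCall`, which these tests import from `google.api_core`; it makes a plain response object awaitable so `await client.get_bucket(...)` works against a mock. A toy equivalent of that wrapper, written from scratch for illustration (not the real helper):

    class ToyUnaryUnaryCall:
        """Awaitable wrapper: ``await ToyUnaryUnaryCall(x)`` returns ``x``."""

        def __init__(self, response):
            self._response = response

        def __await__(self):
            # Delegate to a coroutine so this object can be awaited directly.
            async def _resolve():
                return self._response

            return _resolve().__await__()
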
@@ -2494,7 +2494,7 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) def test_update_bucket_use_cached_wrapped_rpc(): @@ -2580,13 +2580,13 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.update_bucket(request) @@ -2598,13 +2598,13 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -2620,7 +2620,7 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2652,7 +2652,7 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2718,7 +2718,7 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2730,7 +2730,7 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) def test_delete_bucket_use_cached_wrapped_rpc(): @@ -2841,7 +2841,7 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2873,7 +2873,7 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2939,7 +2939,7 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2951,7 +2951,7 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) def test_undelete_bucket_use_cached_wrapped_rpc(): @@ -3062,7 +3062,7 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3094,7 +3094,7 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3136,7 +3136,7 @@ def test_list_views(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_views(request) @@ -3148,7 +3148,7 @@ def test_list_views(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_views_non_empty_request_with_auto_populated_field(): @@ -3163,8 +3163,8 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3176,8 +3176,8 @@ def test_list_views_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_views_use_cached_wrapped_rpc(): @@ -3263,7 +3263,7 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_views(request) @@ -3275,7 +3275,7 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -3291,7 +3291,7 @@ def test_list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3323,7 +3323,7 @@ async def test_list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3359,7 +3359,7 @@ def test_list_views_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3367,7 +3367,7 @@ def test_list_views_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -3381,7 +3381,7 @@ def test_list_views_flattened_error(): with pytest.raises(ValueError): client.list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -3401,7 +3401,7 @@ async def test_list_views_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3409,7 +3409,7 @@ async def test_list_views_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -3423,7 +3423,7 @@ async def test_list_views_flattened_error_async(): with pytest.raises(ValueError): await client.list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3643,9 +3643,9 @@ def test_get_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.get_view(request) @@ -3657,9 +3657,9 @@ def test_get_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_get_view_non_empty_request_with_auto_populated_field(): @@ -3674,7 +3674,7 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3686,7 +3686,7 @@ def test_get_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest( - name='name_value', + name="name_value", ) def test_get_view_use_cached_wrapped_rpc(): @@ -3772,9 +3772,9 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client.get_view(request) @@ -3786,9 +3786,9 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -3804,7 +3804,7 @@ def test_get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3836,7 +3836,7 @@ async def test_get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3878,9 +3878,9 @@ def test_create_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.create_view(request) @@ -3892,9 +3892,9 @@ def test_create_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_create_view_non_empty_request_with_auto_populated_field(): @@ -3909,8 +3909,8 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3922,8 +3922,8 @@ def test_create_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) def test_create_view_use_cached_wrapped_rpc(): @@ -4009,9 +4009,9 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client.create_view(request) @@ -4023,9 +4023,9 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -4041,7 +4041,7 @@ def test_create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4073,7 +4073,7 @@ async def test_create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4115,9 +4115,9 @@ def test_update_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client.update_view(request) @@ -4129,9 +4129,9 @@ def test_update_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test_update_view_non_empty_request_with_auto_populated_field(): @@ -4146,7 +4146,7 @@ def test_update_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4158,7 +4158,7 @@ def test_update_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) def test_update_view_use_cached_wrapped_rpc(): @@ -4244,9 +4244,9 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client.update_view(request) @@ -4258,9 +4258,9 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -4276,7 +4276,7 @@ def test_update_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4308,7 +4308,7 @@ async def test_update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4374,7 +4374,7 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4386,7 +4386,7 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) def test_delete_view_use_cached_wrapped_rpc(): @@ -4497,7 +4497,7 @@ def test_delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
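The *_non_empty_request_with_auto_populated_field tests above reference AIP-4235: string fields annotated as auto-populatable (conventionally request_id) are filled with a UUID4 by the generated client when the caller leaves them unset. The tests pre-set every other field so the final equality check proves the client touched nothing it should not have. A sketch of the behavior being guarded (request_id is a hypothetical illustration; none of the requests in this file carry such a field):

import uuid

def auto_populate(request):
    # Hypothetical mirror of what an AIP-4235-aware client does before
    # sending: fill an annotated, unset string field with a fresh UUID4.
    if not getattr(request, "request_id", ""):
        request.request_id = str(uuid.uuid4())
    return request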
with mock.patch.object( @@ -4529,7 +4529,7 @@ async def test_delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4571,7 +4571,7 @@ def test_list_sinks(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_sinks(request) @@ -4583,7 +4583,7 @@ def test_list_sinks(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_sinks_non_empty_request_with_auto_populated_field(): @@ -4598,8 +4598,8 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4611,8 +4611,8 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_sinks_use_cached_wrapped_rpc(): @@ -4698,7 +4698,7 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_sinks(request) @@ -4710,7 +4710,7 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -4726,7 +4726,7 @@ def test_list_sinks_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4758,7 +4758,7 @@ async def test_list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4794,7 +4794,7 @@ def test_list_sinks_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
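The async counterparts of these tests differ only in plumbing: the mocked stub must return an awaitable, so the response message is wrapped in grpc_helpers_async.FakeUnaryUnaryCall, and the assertions run against the async client under pytest-asyncio. A minimal sketch of that variant (the import path for FakeUnaryUnaryCall follows the convention these goldens use, but treat it as an assumption):

import pytest
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import (
    ConfigServiceV2AsyncClient,
)
from google.cloud.logging_v2.types import logging_config

@pytest.mark.asyncio
async def test_list_sinks_async_sketch():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
        # FakeUnaryUnaryCall makes the mocked stub awaitable.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.ListSinksResponse(next_page_token="next_page_token_value")
        )
        response = await client.list_sinks(logging_config.ListSinksRequest())
    assert response.next_page_token == "next_page_token_value"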
client.list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4802,7 +4802,7 @@ def test_list_sinks_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -4816,7 +4816,7 @@ def test_list_sinks_flattened_error(): with pytest.raises(ValueError): client.list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -4836,7 +4836,7 @@ async def test_list_sinks_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_sinks( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -4844,7 +4844,7 @@ async def test_list_sinks_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -4858,7 +4858,7 @@ async def test_list_sinks_flattened_error_async(): with pytest.raises(ValueError): await client.list_sinks( logging_config.ListSinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -5078,13 +5078,13 @@ def test_get_sink(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.get_sink(request) @@ -5097,13 +5097,13 @@ def test_get_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5119,7 +5119,7 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
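The *_flattened tests pin down the keyword-argument convenience layer: each flattened keyword must be copied onto the matching request field, and passing a request object together with flattened keywords must raise ValueError before any RPC is attempted. Note also the small asymmetry preserved verbatim by this quote-only change: the sync variants assert len(call.mock_calls) == 1 while the async variants assert only truthiness. A condensed sketch of both halves:

import pytest
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
    call.return_value = logging_config.ListSinksResponse()
    client.list_sinks(parent="parent_value")
    # The flattened keyword landed on the request message.
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"

# Mixing a request object with flattened keywords is rejected client-side.
with pytest.raises(ValueError):
    client.list_sinks(logging_config.ListSinksRequest(), parent="parent_value")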
@@ -5131,7 +5131,7 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) def test_get_sink_use_cached_wrapped_rpc(): @@ -5217,13 +5217,13 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) response = await client.get_sink(request) @@ -5236,13 +5236,13 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5259,7 +5259,7 @@ def test_get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5291,7 +5291,7 @@ async def test_get_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5327,7 +5327,7 @@ def test_get_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5335,7 +5335,7 @@ def test_get_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -5349,7 +5349,7 @@ def test_get_sink_flattened_error(): with pytest.raises(ValueError): client.get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) @pytest.mark.asyncio @@ -5369,7 +5369,7 @@ async def test_get_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
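The *_field_headers tests around here exercise implicit routing: whatever value is set on the request's routed field (name, parent, sink_name) must be echoed into the x-goog-request-params metadata of the outgoing call, which is how the backend shards these requests. Only the setup half of those tests is visible in the hunks; a sketch of the assertion half, following the standard GAPIC pattern (the exact header tuple shape is an assumption):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
    call.return_value = logging_config.LogSink()
    request = logging_config.GetSinkRequest()
    request.sink_name = "sink_name_value"
    client.get_sink(request)

# The routed field shows up in the request-params metadata header.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "sink_name=sink_name_value") in kw["metadata"]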
response = await client.get_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -5377,7 +5377,7 @@ async def test_get_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -5391,7 +5391,7 @@ async def test_get_sink_flattened_error_async(): with pytest.raises(ValueError): await client.get_sink( logging_config.GetSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) @@ -5415,13 +5415,13 @@ def test_create_sink(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.create_sink(request) @@ -5434,13 +5434,13 @@ def test_create_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5456,7 +5456,7 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5468,7 +5468,7 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest( - parent='parent_value', + parent="parent_value", ) def test_create_sink_use_cached_wrapped_rpc(): @@ -5554,13 +5554,13 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l '__call__') as call: # Designate an appropriate return value for the call. 
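The *_use_cached_wrapped_rpc tests named in these hunks (their bodies are unchanged by the patch, so they do not appear) guard memoization: wrapping an RPC with its retry and timeout defaults happens once at client construction, and every call afterwards must reuse the cached wrapper. A sketch of the invariant, poking at private attributes purely for illustration (treat _wrapped_methods as an internal detail, not API):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

# Swap the cached wrapper for a mock; both calls must route through it,
# proving nothing re-wraps the method per call.
mock_rpc = mock.Mock(return_value=logging_config.LogSink())
client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc

client.create_sink(request=logging_config.CreateSinkRequest())
client.create_sink(request=logging_config.CreateSinkRequest())
assert mock_rpc.call_count == 2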
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) response = await client.create_sink(request) @@ -5573,13 +5573,13 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5596,7 +5596,7 @@ def test_create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5628,7 +5628,7 @@ async def test_create_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5664,8 +5664,8 @@ def test_create_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5673,10 +5673,10 @@ def test_create_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val @@ -5690,8 +5690,8 @@ def test_create_sink_flattened_error(): with pytest.raises(ValueError): client.create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) @pytest.mark.asyncio @@ -5711,8 +5711,8 @@ async def test_create_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_sink( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -5720,10 +5720,10 @@ async def test_create_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -5737,8 +5737,8 @@ async def test_create_sink_flattened_error_async(): with pytest.raises(ValueError): await client.create_sink( logging_config.CreateSinkRequest(), - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) @@ -5762,13 +5762,13 @@ def test_update_sink(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, ) response = client.update_sink(request) @@ -5781,13 +5781,13 @@ def test_update_sink(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5803,7 +5803,7 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5815,7 +5815,7 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) def test_update_sink_use_cached_wrapped_rpc(): @@ -5901,13 +5901,13 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) response = await client.update_sink(request) @@ -5920,13 +5920,13 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.description == "description_value" assert response.disabled is True assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' + assert response.writer_identity == "writer_identity_value" assert response.include_children is True @@ -5943,7 +5943,7 @@ def test_update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5975,7 +5975,7 @@ async def test_update_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6011,9 +6011,9 @@ def test_update_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6021,13 +6021,13 @@ def test_update_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -6041,9 +6041,9 @@ def test_update_sink_flattened_error(): with pytest.raises(ValueError): client.update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio @@ -6063,9 +6063,9 @@ async def test_update_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_sink( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -6073,13 +6073,13 @@ async def test_update_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val arg = args[0].sink - mock_val = logging_config.LogSink(name='name_value') + mock_val = logging_config.LogSink(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio @@ -6093,9 +6093,9 @@ async def test_update_sink_flattened_error_async(): with pytest.raises(ValueError): await client.update_sink( logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + sink_name="sink_name_value", + sink=logging_config.LogSink(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -6143,7 +6143,7 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
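update_sink is the first method here with a google.protobuf.FieldMask in its flattened signature, and the test feeds it the deliberately meaningless path "paths_value" because only argument propagation is under test, not mask semantics. For contrast, a realistic call names the fields to overwrite (illustrative values, not from the patch; assumes the client from the sync sketch earlier):

from google.protobuf import field_mask_pb2

# Update only the sink's filter; destination, description, etc. are
# left untouched because they are absent from the mask.
client.update_sink(
    sink_name="projects/my-project/sinks/my-sink",  # hypothetical resource name
    sink=logging_config.LogSink(filter='severity>="ERROR"'),
    update_mask=field_mask_pb2.FieldMask(paths=["filter"]),
)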
@@ -6155,7 +6155,7 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest( - sink_name='sink_name_value', + sink_name="sink_name_value", ) def test_delete_sink_use_cached_wrapped_rpc(): @@ -6266,7 +6266,7 @@ def test_delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6298,7 +6298,7 @@ async def test_delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name_value' + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6334,7 +6334,7 @@ def test_delete_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6342,7 +6342,7 @@ def test_delete_sink_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @@ -6356,7 +6356,7 @@ def test_delete_sink_flattened_error(): with pytest.raises(ValueError): client.delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) @pytest.mark.asyncio @@ -6376,7 +6376,7 @@ async def test_delete_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_sink( - sink_name='sink_name_value', + sink_name="sink_name_value", ) # Establish that the underlying call was made with the expected @@ -6384,7 +6384,7 @@ async def test_delete_sink_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].sink_name - mock_val = 'sink_name_value' + mock_val = "sink_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -6398,7 +6398,7 @@ async def test_delete_sink_flattened_error_async(): with pytest.raises(ValueError): await client.delete_sink( logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', + sink_name="sink_name_value", ) @@ -6446,8 +6446,8 @@ def test_create_link_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6459,8 +6459,8 @@ def test_create_link_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest( - parent='parent_value', - link_id='link_id_value', + parent="parent_value", + link_id="link_id_value", ) def test_create_link_use_cached_wrapped_rpc(): @@ -6583,7 +6583,7 @@ def test_create_link_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6615,7 +6615,7 @@ async def test_create_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateLinkRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6651,9 +6651,9 @@ def test_create_link_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6661,13 +6661,13 @@ def test_create_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val @@ -6681,9 +6681,9 @@ def test_create_link_flattened_error(): with pytest.raises(ValueError): client.create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) @pytest.mark.asyncio @@ -6705,9 +6705,9 @@ async def test_create_link_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_link( - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) # Establish that the underlying call was made with the expected @@ -6715,13 +6715,13 @@ async def test_create_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].link - mock_val = logging_config.Link(name='name_value') + mock_val = logging_config.Link(name="name_value") assert arg == mock_val arg = args[0].link_id - mock_val = 'link_id_value' + mock_val = "link_id_value" assert arg == mock_val @pytest.mark.asyncio @@ -6735,9 +6735,9 @@ async def test_create_link_flattened_error_async(): with pytest.raises(ValueError): await client.create_link( logging_config.CreateLinkRequest(), - parent='parent_value', - link=logging_config.Link(name='name_value'), - link_id='link_id_value', + parent="parent_value", + link=logging_config.Link(name="name_value"), + link_id="link_id_value", ) @@ -6785,7 +6785,7 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
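Unlike the sink and view RPCs, create_link and delete_link are long-running operations, so their happy-path tests (untouched by this patch and therefore absent from the hunks) mock the stub with a longrunning Operation proto and expect the client to hand back an operation future. A hedged sketch of that shape, with "operations/spam" as a placeholder name in the golden style:

from unittest import mock

from google.api_core import operation as gac_operation
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config
from google.longrunning import operations_pb2

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

with mock.patch.object(type(client.transport.create_link), "__call__") as call:
    call.return_value = operations_pb2.Operation(name="operations/spam")
    response = client.create_link(
        parent="parent_value",
        link=logging_config.Link(name="name_value"),
        link_id="link_id_value",
    )

# The raw Operation proto is wrapped in a polling future; resolving it
# would need a mocked GetOperation, so only the type is checked here.
assert isinstance(response, gac_operation.Operation)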
@@ -6797,7 +6797,7 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest( - name='name_value', + name="name_value", ) def test_delete_link_use_cached_wrapped_rpc(): @@ -6920,7 +6920,7 @@ def test_delete_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6952,7 +6952,7 @@ async def test_delete_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6988,7 +6988,7 @@ def test_delete_link_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -6996,7 +6996,7 @@ def test_delete_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7010,7 +7010,7 @@ def test_delete_link_flattened_error(): with pytest.raises(ValueError): client.delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -7032,7 +7032,7 @@ async def test_delete_link_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7040,7 +7040,7 @@ async def test_delete_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -7054,7 +7054,7 @@ async def test_delete_link_flattened_error_async(): with pytest.raises(ValueError): await client.delete_link( logging_config.DeleteLinkRequest(), - name='name_value', + name="name_value", ) @@ -7078,7 +7078,7 @@ def test_list_links(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_links(request) @@ -7090,7 +7090,7 @@ def test_list_links(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_links_non_empty_request_with_auto_populated_field(): @@ -7105,8 +7105,8 @@ def test_list_links_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7118,8 +7118,8 @@ def test_list_links_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_links_use_cached_wrapped_rpc(): @@ -7205,7 +7205,7 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_links(request) @@ -7217,7 +7217,7 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -7233,7 +7233,7 @@ def test_list_links_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7265,7 +7265,7 @@ async def test_list_links_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListLinksRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7301,7 +7301,7 @@ def test_list_links_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7309,7 +7309,7 @@ def test_list_links_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -7323,7 +7323,7 @@ def test_list_links_flattened_error(): with pytest.raises(ValueError): client.list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -7343,7 +7343,7 @@ async def test_list_links_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_links( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -7351,7 +7351,7 @@ async def test_list_links_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -7365,7 +7365,7 @@ async def test_list_links_flattened_error_async(): with pytest.raises(ValueError): await client.list_links( logging_config.ListLinksRequest(), - parent='parent_value', + parent="parent_value", ) @@ -7585,8 +7585,8 @@ def test_get_link(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
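The ListLinks and ListSinks assertions above only check that the client returns a pager (pagers.ListLinksPager sync, the AsyncPager variant under asyncio) proxying next_page_token from the first response. The pager's real job is transparent pagination: it keeps issuing requests while a next_page_token is present. A sketch staging two pages with side_effect (this mirrors the paging tests later in the file, but the values here are illustrative):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

with mock.patch.object(type(client.transport.list_links), "__call__") as call:
    call.side_effect = [
        logging_config.ListLinksResponse(
            links=[logging_config.Link(), logging_config.Link()],
            next_page_token="abc",
        ),
        logging_config.ListLinksResponse(links=[logging_config.Link()]),
    ]
    # Iterating the pager transparently issues the second request.
    results = list(client.list_links(parent="parent_value"))

assert len(results) == 3
assert call.call_count == 2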
call.return_value = logging_config.Link( - name='name_value', - description='description_value', + name="name_value", + description="description_value", lifecycle_state=logging_config.LifecycleState.ACTIVE, ) response = client.get_link(request) @@ -7599,8 +7599,8 @@ def test_get_link(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7616,7 +7616,7 @@ def test_get_link_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7628,7 +7628,7 @@ def test_get_link_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest( - name='name_value', + name="name_value", ) def test_get_link_use_cached_wrapped_rpc(): @@ -7714,8 +7714,8 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', + name="name_value", + description="description_value", lifecycle_state=logging_config.LifecycleState.ACTIVE, )) response = await client.get_link(request) @@ -7728,8 +7728,8 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -7746,7 +7746,7 @@ def test_get_link_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7778,7 +7778,7 @@ async def test_get_link_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetLinkRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -7814,7 +7814,7 @@ def test_get_link_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7822,7 +7822,7 @@ def test_get_link_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -7836,7 +7836,7 @@ def test_get_link_flattened_error(): with pytest.raises(ValueError): client.get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -7856,7 +7856,7 @@ async def test_get_link_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_link( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -7864,7 +7864,7 @@ async def test_get_link_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -7878,7 +7878,7 @@ async def test_get_link_flattened_error_async(): with pytest.raises(ValueError): await client.get_link( logging_config.GetLinkRequest(), - name='name_value', + name="name_value", ) @@ -7902,7 +7902,7 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_exclusions(request) @@ -7914,7 +7914,7 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_exclusions_non_empty_request_with_auto_populated_field(): @@ -7929,8 +7929,8 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7942,8 +7942,8 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_exclusions_use_cached_wrapped_rpc(): @@ -8029,7 +8029,7 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_exclusions(request) @@ -8041,7 +8041,7 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -8057,7 +8057,7 @@ def test_list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8089,7 +8089,7 @@ async def test_list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8125,7 +8125,7 @@ def test_list_exclusions_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8133,7 +8133,7 @@ def test_list_exclusions_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -8147,7 +8147,7 @@ def test_list_exclusions_flattened_error(): with pytest.raises(ValueError): client.list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -8167,7 +8167,7 @@ async def test_list_exclusions_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_exclusions( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -8175,7 +8175,7 @@ async def test_list_exclusions_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -8189,7 +8189,7 @@ async def test_list_exclusions_flattened_error_async(): with pytest.raises(ValueError): await client.list_exclusions( logging_config.ListExclusionsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -8409,9 +8409,9 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.get_exclusion(request) @@ -8424,9 +8424,9 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8442,7 +8442,7 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8454,7 +8454,7 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest( - name='name_value', + name="name_value", ) def test_get_exclusion_use_cached_wrapped_rpc(): @@ -8540,9 +8540,9 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) response = await client.get_exclusion(request) @@ -8555,9 +8555,9 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8574,7 +8574,7 @@ def test_get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8606,7 +8606,7 @@ async def test_get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8642,7 +8642,7 @@ def test_get_exclusion_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8650,7 +8650,7 @@ def test_get_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -8664,7 +8664,7 @@ def test_get_exclusion_flattened_error(): with pytest.raises(ValueError): client.get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -8684,7 +8684,7 @@ async def test_get_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -8692,7 +8692,7 @@ async def test_get_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -8706,7 +8706,7 @@ async def test_get_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.get_exclusion( logging_config.GetExclusionRequest(), - name='name_value', + name="name_value", ) @@ -8730,9 +8730,9 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.create_exclusion(request) @@ -8745,9 +8745,9 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8763,7 +8763,7 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8775,7 +8775,7 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest( - parent='parent_value', + parent="parent_value", ) def test_create_exclusion_use_cached_wrapped_rpc(): @@ -8861,9 +8861,9 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) response = await client.create_exclusion(request) @@ -8876,9 +8876,9 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -8895,7 +8895,7 @@ def test_create_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8927,7 +8927,7 @@ async def test_create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -8963,8 +8963,8 @@ def test_create_exclusion_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -8972,10 +8972,10 @@ def test_create_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val @@ -8989,8 +8989,8 @@ def test_create_exclusion_flattened_error(): with pytest.raises(ValueError): client.create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) @pytest.mark.asyncio @@ -9010,8 +9010,8 @@ async def test_create_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_exclusion( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -9019,10 +9019,10 @@ async def test_create_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -9036,8 +9036,8 @@ async def test_create_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.create_exclusion( logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), + parent="parent_value", + exclusion=logging_config.LogExclusion(name="name_value"), ) @@ -9061,9 +9061,9 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, ) response = client.update_exclusion(request) @@ -9076,9 +9076,9 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9094,7 +9094,7 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9106,7 +9106,7 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest( - name='name_value', + name="name_value", ) def test_update_exclusion_use_cached_wrapped_rpc(): @@ -9192,9 +9192,9 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) response = await client.update_exclusion(request) @@ -9207,9 +9207,9 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" assert response.disabled is True @@ -9226,7 +9226,7 @@ def test_update_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9258,7 +9258,7 @@ async def test_update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9294,9 +9294,9 @@ def test_update_exclusion_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9304,13 +9304,13 @@ def test_update_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -9324,9 +9324,9 @@ def test_update_exclusion_flattened_error(): with pytest.raises(ValueError): client.update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio @@ -9346,9 +9346,9 @@ async def test_update_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_exclusion( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -9356,13 +9356,13 @@ async def test_update_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') + mock_val = logging_config.LogExclusion(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio @@ -9376,9 +9376,9 @@ async def test_update_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.update_exclusion( logging_config.UpdateExclusionRequest(), - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + name="name_value", + exclusion=logging_config.LogExclusion(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -9426,7 +9426,7 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9438,7 +9438,7 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest( - name='name_value', + name="name_value", ) def test_delete_exclusion_use_cached_wrapped_rpc(): @@ -9549,7 +9549,7 @@ def test_delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9581,7 +9581,7 @@ async def test_delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9617,7 +9617,7 @@ def test_delete_exclusion_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9625,7 +9625,7 @@ def test_delete_exclusion_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -9639,7 +9639,7 @@ def test_delete_exclusion_flattened_error(): with pytest.raises(ValueError): client.delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -9659,7 +9659,7 @@ async def test_delete_exclusion_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_exclusion( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -9667,7 +9667,7 @@ async def test_delete_exclusion_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -9681,7 +9681,7 @@ async def test_delete_exclusion_flattened_error_async(): with pytest.raises(ValueError): await client.delete_exclusion( logging_config.DeleteExclusionRequest(), - name='name_value', + name="name_value", ) @@ -9705,10 +9705,10 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client.get_cmek_settings(request) @@ -9720,10 +9720,10 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9738,7 +9738,7 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
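[Editor's note: the hunks above only flip quote style inside the generated flattened-call tests. For readers skimming the goldens, the pattern those tests exercise is worth isolating: keyword arguments are copied into a request message, which the client hands positionally to the transport stub. A self-contained sketch follows; DeleteExclusionRequest, Stub, and Client are toy stand-ins, not the generated surface.]

from dataclasses import dataclass
from unittest import mock


@dataclass
class DeleteExclusionRequest:
    name: str = ""


class Stub:
    def __call__(self, request):
        raise NotImplementedError  # replaced by the mock below


class Client:
    def __init__(self):
        self._stub = Stub()

    def delete_exclusion(self, name):
        # Flattened kwarg -> request field, as in the generated clients.
        self._stub(DeleteExclusionRequest(name=name))


def test_flattened_kwarg_copied_into_request():
    client = Client()
    # Patching type(...).__call__ mirrors the goldens' mock.patch.object
    # pattern; the mock then records the request message as args[0].
    with mock.patch.object(type(client._stub), "__call__") as call:
        client.delete_exclusion(name="name_value")
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"


test_flattened_kwarg_copied_into_request()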
@@ -9750,7 +9750,7 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest( - name='name_value', + name="name_value", ) def test_get_cmek_settings_use_cached_wrapped_rpc(): @@ -9836,10 +9836,10 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", )) response = await client.get_cmek_settings(request) @@ -9851,10 +9851,10 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio @@ -9870,7 +9870,7 @@ def test_get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9902,7 +9902,7 @@ async def test_get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -9944,10 +9944,10 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", ) response = client.update_cmek_settings(request) @@ -9959,10 +9959,10 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): @@ -9977,7 +9977,7 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9989,7 +9989,7 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest( - name='name_value', + name="name_value", ) def test_update_cmek_settings_use_cached_wrapped_rpc(): @@ -10075,10 +10075,10 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", )) response = await client.update_cmek_settings(request) @@ -10090,10 +10090,10 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_key_version_name == 'kms_key_version_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_key_version_name == "kms_key_version_name_value" + assert response.service_account_id == "service_account_id_value" @pytest.mark.asyncio @@ -10109,7 +10109,7 @@ def test_update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -10141,7 +10141,7 @@ async def test_update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -10183,10 +10183,10 @@ def test_get_settings(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client.get_settings(request) @@ -10199,10 +10199,10 @@ def test_get_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10218,7 +10218,7 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10230,7 +10230,7 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest( - name='name_value', + name="name_value", ) def test_get_settings_use_cached_wrapped_rpc(): @@ -10316,10 +10316,10 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, )) response = await client.get_settings(request) @@ -10332,10 +10332,10 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10352,7 +10352,7 @@ def test_get_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -10384,7 +10384,7 @@ async def test_get_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -10420,7 +10420,7 @@ def test_get_settings_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10428,7 +10428,7 @@ def test_get_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -10442,7 +10442,7 @@ def test_get_settings_flattened_error(): with pytest.raises(ValueError): client.get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -10462,7 +10462,7 @@ async def test_get_settings_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_settings( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -10470,7 +10470,7 @@ async def test_get_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -10484,7 +10484,7 @@ async def test_get_settings_flattened_error_async(): with pytest.raises(ValueError): await client.get_settings( logging_config.GetSettingsRequest(), - name='name_value', + name="name_value", ) @@ -10508,10 +10508,10 @@ def test_update_settings(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, ) response = client.update_settings(request) @@ -10524,10 +10524,10 @@ def test_update_settings(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10543,7 +10543,7 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
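[Editor's note: the *_field_headers tests in the surrounding hunks set a routing field (request.name = "name_value") and, in lines elided from this patch, assert that it reaches the server as gRPC metadata. As far as these goldens go, the routing value travels in an "x-goog-request-params" metadata entry; the sketch below assumes that behavior, and Stub/Client are hypothetical stand-ins.]

from unittest import mock


class Stub:
    def __call__(self, request, metadata=()):
        raise NotImplementedError


class Client:
    def __init__(self):
        self._stub = Stub()

    def get_settings(self, request):
        # Assumption: routing fields are mirrored into this metadata entry
        # so the backend can route the call (the assertion on it is elided
        # from the hunks above).
        metadata = (("x-goog-request-params", f"name={request['name']}"),)
        self._stub(request, metadata=metadata)


def test_field_headers():
    client = Client()
    with mock.patch.object(type(client._stub), "__call__") as call:
        client.get_settings({"name": "name_value"})
        _, _, kw = call.mock_calls[0]
        assert ("x-goog-request-params", "name=name_value") in kw["metadata"]


test_field_headers()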
@@ -10555,7 +10555,7 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest( - name='name_value', + name="name_value", ) def test_update_settings_use_cached_wrapped_rpc(): @@ -10641,10 +10641,10 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, )) response = await client.update_settings(request) @@ -10657,10 +10657,10 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.kms_service_account_id == 'kms_service_account_id_value' - assert response.storage_location == 'storage_location_value' + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" assert response.disable_default_sink is True @@ -10677,7 +10677,7 @@ def test_update_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -10709,7 +10709,7 @@ async def test_update_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -10745,8 +10745,8 @@ def test_update_settings_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10754,10 +10754,10 @@ def test_update_settings_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @@ -10771,8 +10771,8 @@ def test_update_settings_flattened_error(): with pytest.raises(ValueError): client.update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio @@ -10792,8 +10792,8 @@ async def test_update_settings_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_settings( - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected @@ -10801,10 +10801,10 @@ async def test_update_settings_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].settings - mock_val = logging_config.Settings(name='name_value') + mock_val = logging_config.Settings(name="name_value") assert arg == mock_val arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio @@ -10818,8 +10818,8 @@ async def test_update_settings_flattened_error_async(): with pytest.raises(ValueError): await client.update_settings( logging_config.UpdateSettingsRequest(), - settings=logging_config.Settings(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -10867,9 +10867,9 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
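[Editor's note: the update_* tests above pass update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]) to scope which fields the server should update; "paths_value" is just the goldens' placeholder. A real mask names message fields by their proto paths, e.g. (field names here are illustrative):]

from google.protobuf import field_mask_pb2

# A mask scoping an UpdateSettings-style call to two fields.
mask = field_mask_pb2.FieldMask(paths=["storage_location", "kms_key_name"])
assert list(mask.paths) == ["storage_location", "kms_key_name"]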
@@ -10881,9 +10881,9 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest( - name='name_value', - filter='filter_value', - destination='destination_value', + name="name_value", + filter="filter_value", + destination="destination_value", ) def test_copy_log_entries_use_cached_wrapped_rpc(): @@ -11865,7 +11865,7 @@ async def test_list_buckets_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_buckets(request=None) @@ -11892,13 +11892,13 @@ async def test_get_bucket_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) await client.get_bucket(request=None) @@ -11979,13 +11979,13 @@ async def test_create_bucket_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) await client.create_bucket(request=None) @@ -12012,13 +12012,13 @@ async def test_update_bucket_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) await client.update_bucket(request=None) @@ -12095,7 +12095,7 @@ async def test_list_views_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_views(request=None) @@ -12122,9 +12122,9 @@ async def test_get_view_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) await client.get_view(request=None) @@ -12151,9 +12151,9 @@ async def test_create_view_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) await client.create_view(request=None) @@ -12180,9 +12180,9 @@ async def test_update_view_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) await client.update_view(request=None) @@ -12234,7 +12234,7 @@ async def test_list_sinks_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_sinks(request=None) @@ -12261,13 +12261,13 @@ async def test_get_sink_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) await client.get_sink(request=None) @@ -12295,13 +12295,13 @@ async def test_create_sink_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) await client.create_sink(request=None) @@ -12329,13 +12329,13 @@ async def test_update_sink_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', + name="name_value", + destination="destination_value", + filter="filter_value", + description="description_value", disabled=True, output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', + writer_identity="writer_identity_value", include_children=True, )) await client.update_sink(request=None) @@ -12442,7 +12442,7 @@ async def test_list_links_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_links(request=None) @@ -12469,8 +12469,8 @@ async def test_get_link_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', + name="name_value", + description="description_value", lifecycle_state=logging_config.LifecycleState.ACTIVE, )) await client.get_link(request=None) @@ -12498,7 +12498,7 @@ async def test_list_exclusions_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_exclusions(request=None) @@ -12525,9 +12525,9 @@ async def test_get_exclusion_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) await client.get_exclusion(request=None) @@ -12555,9 +12555,9 @@ async def test_create_exclusion_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) await client.create_exclusion(request=None) @@ -12585,9 +12585,9 @@ async def test_update_exclusion_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", disabled=True, )) await client.update_exclusion(request=None) @@ -12640,10 +12640,10 @@ async def test_get_cmek_settings_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", )) await client.get_cmek_settings(request=None) @@ -12670,10 +12670,10 @@ async def test_update_cmek_settings_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_key_version_name="kms_key_version_name_value", + service_account_id="service_account_id_value", )) await client.update_cmek_settings(request=None) @@ -12700,10 +12700,10 @@ async def test_get_settings_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, )) await client.get_settings(request=None) @@ -12731,10 +12731,10 @@ async def test_update_settings_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", disable_default_sink=True, )) await client.update_settings(request=None) diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 614126cfdb..436e34001f 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -990,7 +990,7 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1002,7 +1002,7 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest( - log_name='log_name_value', + log_name="log_name_value", ) def test_delete_log_use_cached_wrapped_rpc(): @@ -1113,7 +1113,7 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. 
request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1145,7 +1145,7 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name_value' + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1181,7 +1181,7 @@ def test_delete_log_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1189,7 +1189,7 @@ def test_delete_log_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val @@ -1203,7 +1203,7 @@ def test_delete_log_flattened_error(): with pytest.raises(ValueError): client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) @pytest.mark.asyncio @@ -1223,7 +1223,7 @@ async def test_delete_log_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_log( - log_name='log_name_value', + log_name="log_name_value", ) # Establish that the underlying call was made with the expected @@ -1231,7 +1231,7 @@ async def test_delete_log_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -1245,7 +1245,7 @@ async def test_delete_log_flattened_error_async(): with pytest.raises(ValueError): await client.delete_log( logging.DeleteLogRequest(), - log_name='log_name_value', + log_name="log_name_value", ) @@ -1294,7 +1294,7 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1306,7 +1306,7 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest( - log_name='log_name_value', + log_name="log_name_value", ) def test_write_log_entries_use_cached_wrapped_rpc(): @@ -1424,10 +1424,10 @@ def test_write_log_entries_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1435,16 +1435,16 @@ def test_write_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val @@ -1458,10 +1458,10 @@ def test_write_log_entries_flattened_error(): with pytest.raises(ValueError): client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) @pytest.mark.asyncio @@ -1481,10 +1481,10 @@ async def test_write_log_entries_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.write_log_entries( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) # Establish that the underlying call was made with the expected @@ -1492,16 +1492,16 @@ async def test_write_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].log_name - mock_val = 'log_name_value' + mock_val = "log_name_value" assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + mock_val = monitored_resource_pb2.MonitoredResource(type="type_value") assert arg == mock_val arg = args[0].labels - mock_val = {'key_value': 'value_value'} + mock_val = {"key_value": "value_value"} assert arg == mock_val arg = args[0].entries - mock_val = [log_entry.LogEntry(log_name='log_name_value')] + mock_val = [log_entry.LogEntry(log_name="log_name_value")] assert arg == mock_val @pytest.mark.asyncio @@ -1515,10 +1515,10 @@ async def test_write_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.write_log_entries( logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], + log_name="log_name_value", + resource=monitored_resource_pb2.MonitoredResource(type="type_value"), + labels={"key_value": "value_value"}, + entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -1542,7 +1542,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_log_entries(request) @@ -1554,7 +1554,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_log_entries_non_empty_request_with_auto_populated_field(): @@ -1569,9 +1569,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
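[Editor's note: the async hunks wrap canned responses in grpc_helpers_async.FakeUnaryUnaryCall so that `await client.method(...)` resolves to a plain message. Roughly, such a fake is just an awaitable that yields its stored response; a minimal stand-in under that assumption (not the google.api_core implementation):]

import asyncio


class FakeUnaryUnaryCall:
    """Awaitable resolving to a canned response, as the goldens rely on."""

    def __init__(self, response=None):
        self._response = response

    def __await__(self):
        if False:  # never runs; only makes this method a generator
            yield
        return self._response


async def main():
    call = FakeUnaryUnaryCall({"next_page_token": "next_page_token_value"})
    assert (await call)["next_page_token"] == "next_page_token_value"


asyncio.run(main())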
@@ -1583,9 +1583,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest( - filter='filter_value', - order_by='order_by_value', - page_token='page_token_value', + filter="filter_value", + order_by="order_by_value", + page_token="page_token_value", ) def test_list_log_entries_use_cached_wrapped_rpc(): @@ -1671,7 +1671,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_log_entries(request) @@ -1683,7 +1683,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1705,9 +1705,9 @@ def test_list_log_entries_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1715,13 +1715,13 @@ def test_list_log_entries_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val @@ -1735,9 +1735,9 @@ def test_list_log_entries_flattened_error(): with pytest.raises(ValueError): client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) @pytest.mark.asyncio @@ -1757,9 +1757,9 @@ async def test_list_log_entries_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.list_log_entries( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) # Establish that the underlying call was made with the expected @@ -1767,13 +1767,13 @@ async def test_list_log_entries_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].resource_names - mock_val = ['resource_names_value'] + mock_val = ["resource_names_value"] assert arg == mock_val arg = args[0].filter - mock_val = 'filter_value' + mock_val = "filter_value" assert arg == mock_val arg = args[0].order_by - mock_val = 'order_by_value' + mock_val = "order_by_value" assert arg == mock_val @pytest.mark.asyncio @@ -1787,9 +1787,9 @@ async def test_list_log_entries_flattened_error_async(): with pytest.raises(ValueError): await client.list_log_entries( logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', + resource_names=["resource_names_value"], + filter="filter_value", + order_by="order_by_value", ) @@ -2004,7 +2004,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_monitored_resource_descriptors(request) @@ -2016,7 +2016,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): @@ -2031,7 +2031,7 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2043,7 +2043,7 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( - page_token='page_token_value', + page_token="page_token_value", ) def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): @@ -2129,7 +2129,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_monitored_resource_descriptors(request) @@ -2141,7 +2141,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Establish that the response is the type that we expect. 
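All of the *_flattened_error tests in this file assert the same client contract: flattened keyword arguments and an explicit request object are mutually exclusive. A minimal sketch of that guard, using list_log_entries' fields (the function name here is illustrative); it mirrors the check that appears in the generated async client later in this patch:

from typing import List, Optional

def list_log_entries_guard(
    request: Optional[object] = None,
    *,
    resource_names: Optional[List[str]] = None,
    filter: Optional[str] = None,
    order_by: Optional[str] = None,
) -> None:
    # Flattened params are a convenience layer over the request proto; if both
    # are given they could disagree, so the client refuses the combination.
    flattened_params = [resource_names, filter, order_by]
    has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
    if request is not None and has_flattened_params:
        raise ValueError(
            "If the `request` argument is set, then none of the "
            "individual field arguments should be set."
        )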
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2360,8 +2360,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -2373,8 +2373,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" def test_list_logs_non_empty_request_with_auto_populated_field(): @@ -2389,8 +2389,8 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2402,8 +2402,8 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_logs_use_cached_wrapped_rpc(): @@ -2489,8 +2489,8 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", )) response = await client.list_logs(request) @@ -2502,8 +2502,8 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2519,7 +2519,7 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2551,7 +2551,7 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2587,7 +2587,7 @@ def test_list_logs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2595,7 +2595,7 @@ def test_list_logs_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2609,7 +2609,7 @@ def test_list_logs_flattened_error(): with pytest.raises(ValueError): client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -2629,7 +2629,7 @@ async def test_list_logs_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2637,7 +2637,7 @@ async def test_list_logs_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -2651,7 +2651,7 @@ async def test_list_logs_flattened_error_async(): with pytest.raises(ValueError): await client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3283,7 +3283,7 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_log_entries(request=None) @@ -3310,7 +3310,7 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_monitored_resource_descriptors(request=None) @@ -3337,8 +3337,8 @@ async def test_list_logs_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", )) await client.list_logs(request=None) diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 027c8ab372..ce4865dc5f 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -964,7 +964,7 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_log_metrics(request) @@ -976,7 +976,7 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_log_metrics_non_empty_request_with_auto_populated_field(): @@ -991,8 +991,8 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1004,8 +1004,8 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_log_metrics_use_cached_wrapped_rpc(): @@ -1091,7 +1091,7 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_log_metrics(request) @@ -1103,7 +1103,7 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1119,7 +1119,7 @@ def test_list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1151,7 +1151,7 @@ async def test_list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1187,7 @@ def test_list_log_metrics_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1195,7 +1195,7 @@ def test_list_log_metrics_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1209,7 +1209,7 @@ def test_list_log_metrics_flattened_error(): with pytest.raises(ValueError): client.list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1229,7 +1229,7 @@ async def test_list_log_metrics_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1237,7 +1237,7 @@ async def test_list_log_metrics_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1251,7 +1251,7 @@ async def test_list_log_metrics_flattened_error_async(): with pytest.raises(ValueError): await client.list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1471,12 +1471,12 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.get_log_metric(request) @@ -1489,12 +1489,12 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1510,7 +1510,7 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1522,7 +1522,7 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test_get_log_metric_use_cached_wrapped_rpc(): @@ -1608,12 +1608,12 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client.get_log_metric(request) @@ -1626,12 +1626,12 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1648,7 +1648,7 @@ def test_get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1680,7 +1680,7 @@ async def test_get_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1716,7 +1716,7 @@ def test_get_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1724,7 +1724,7 @@ def test_get_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -1738,7 +1738,7 @@ def test_get_log_metric_flattened_error(): with pytest.raises(ValueError): client.get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @pytest.mark.asyncio @@ -1758,7 +1758,7 @@ async def test_get_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
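The *_field_headers tests above set request.parent or request.metric_name to a non-empty value and then look for a matching x-goog-request-params entry in the call metadata. That header is built the way the async client hunks later in this patch show; a standalone sketch, with an illustrative resource value:

from google.api_core import gapic_v1

request_metric_name = "projects/my-project/metrics/my-metric"  # illustrative

metadata: tuple = ()
metadata = tuple(metadata) + (
    gapic_v1.routing_header.to_grpc_metadata((
        ("metric_name", request_metric_name),
    )),
)
# to_grpc_metadata yields one ("x-goog-request-params", "metric_name=...")
# pair with the value URL-encoded, which is what the tests match against.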
response = await client.get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1766,7 +1766,7 @@ async def test_get_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -1780,7 +1780,7 @@ async def test_get_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -1804,12 +1804,12 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.create_log_metric(request) @@ -1822,12 +1822,12 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1843,7 +1843,7 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1855,7 +1855,7 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) def test_create_log_metric_use_cached_wrapped_rpc(): @@ -1941,12 +1941,12 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client.create_log_metric(request) @@ -1959,12 +1959,12 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1981,7 +1981,7 @@ def test_create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2013,7 +2013,7 @@ async def test_create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2049,8 +2049,8 @@ def test_create_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2058,10 +2058,10 @@ def test_create_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2075,8 +2075,8 @@ def test_create_log_metric_flattened_error(): with pytest.raises(ValueError): client.create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @pytest.mark.asyncio @@ -2096,8 +2096,8 @@ async def test_create_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2105,10 +2105,10 @@ async def test_create_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2122,8 +2122,8 @@ async def test_create_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @@ -2147,12 +2147,12 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client.update_log_metric(request) @@ -2165,12 +2165,12 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2186,7 +2186,7 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2198,7 +2198,7 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test_update_log_metric_use_cached_wrapped_rpc(): @@ -2284,12 +2284,12 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client.update_log_metric(request) @@ -2302,12 +2302,12 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2324,7 +2324,7 @@ def test_update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2356,7 +2356,7 @@ async def test_update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2392,8 +2392,8 @@ def test_update_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2401,10 +2401,10 @@ def test_update_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2418,8 +2418,8 @@ def test_update_log_metric_flattened_error(): with pytest.raises(ValueError): client.update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @pytest.mark.asyncio @@ -2439,8 +2439,8 @@ async def test_update_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2448,10 +2448,10 @@ async def test_update_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2465,8 +2465,8 @@ async def test_update_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @@ -2514,7 +2514,7 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2526,7 +2526,7 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test_delete_log_metric_use_cached_wrapped_rpc(): @@ -2637,7 +2637,7 @@ def test_delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2669,7 +2669,7 @@ async def test_delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2705,7 +2705,7 @@ def test_delete_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2713,7 +2713,7 @@ def test_delete_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -2727,7 +2727,7 @@ def test_delete_log_metric_flattened_error(): with pytest.raises(ValueError): client.delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @pytest.mark.asyncio @@ -2747,7 +2747,7 @@ async def test_delete_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2755,7 +2755,7 @@ async def test_delete_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -2769,7 +2769,7 @@ async def test_delete_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client.delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -3019,7 +3019,7 @@ async def test_list_log_metrics_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_log_metrics(request=None) @@ -3046,12 +3046,12 @@ async def test_get_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client.get_log_metric(request=None) @@ -3079,12 +3079,12 @@ async def test_create_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client.create_log_metric(request=None) @@ -3112,12 +3112,12 @@ async def test_update_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
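The grpc_asyncio variants above wrap every mocked return value in grpc_helpers_async.FakeUnaryUnaryCall, because the async client awaits the stub's call object rather than receiving the message directly. A small standalone illustration of that pattern:

import asyncio

from google.api_core import grpc_helpers_async
from google.cloud.logging_v2.types import logging_metrics

async def main() -> None:
    # FakeUnaryUnaryCall turns a plain proto message into an awaitable call
    # object, so a mocked stub behaves like a real grpc.aio multicallable.
    fake_call = grpc_helpers_async.FakeUnaryUnaryCall(
        logging_metrics.LogMetric(name="name_value")
    )
    response = await fake_call
    assert response.name == "name_value"

asyncio.run(main())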
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client.update_log_metric(request=None) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py index 8e644e881d..c665bb5f30 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py @@ -102,86 +102,87 @@ from google.cloud.logging_v2.types.logging_metrics import LogMetric from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest -__all__ = ('BaseConfigServiceV2Client', - 'BaseConfigServiceV2AsyncClient', - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', - 'BaseMetricsServiceV2Client', - 'BaseMetricsServiceV2AsyncClient', - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', +__all__ = ( + "BaseConfigServiceV2Client", + "BaseConfigServiceV2AsyncClient", + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", + "BaseMetricsServiceV2Client", + "BaseMetricsServiceV2AsyncClient", + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + 
"ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index 38f3dc49f5..9b81905462 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -112,10 +112,10 @@ from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: @@ -125,20 +125,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.cloud.logging_v2" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -176,107 +180,111 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'BaseConfigServiceV2AsyncClient', - 'BaseMetricsServiceV2AsyncClient', - 'LoggingServiceV2AsyncClient', -'BaseConfigServiceV2Client', -'BaseMetricsServiceV2Client', -'BigQueryDataset', -'BigQueryOptions', -'BucketMetadata', -'CmekSettings', -'CopyLogEntriesMetadata', -'CopyLogEntriesRequest', -'CopyLogEntriesResponse', -'CreateBucketRequest', -'CreateExclusionRequest', -'CreateLinkRequest', -'CreateLogMetricRequest', -'CreateSinkRequest', -'CreateViewRequest', -'DeleteBucketRequest', -'DeleteExclusionRequest', -'DeleteLinkRequest', -'DeleteLogMetricRequest', -'DeleteLogRequest', -'DeleteSinkRequest', -'DeleteViewRequest', -'GetBucketRequest', -'GetCmekSettingsRequest', -'GetExclusionRequest', -'GetLinkRequest', -'GetLogMetricRequest', -'GetSettingsRequest', -'GetSinkRequest', -'GetViewRequest', -'IndexConfig', -'IndexType', -'LifecycleState', -'Link', -'LinkMetadata', -'ListBucketsRequest', -'ListBucketsResponse', -'ListExclusionsRequest', -'ListExclusionsResponse', -'ListLinksRequest', -'ListLinksResponse', -'ListLogEntriesRequest', -'ListLogEntriesResponse', -'ListLogMetricsRequest', -'ListLogMetricsResponse', -'ListLogsRequest', -'ListLogsResponse', -'ListMonitoredResourceDescriptorsRequest', -'ListMonitoredResourceDescriptorsResponse', -'ListSinksRequest', -'ListSinksResponse', -'ListViewsRequest', -'ListViewsResponse', -'LocationMetadata', -'LogBucket', -'LogEntry', -'LogEntryOperation', -'LogEntrySourceLocation', -'LogExclusion', -'LogMetric', -'LogSink', -'LogSplit', -'LogView', -'LoggingServiceV2Client', -'OperationState', -'Settings', -'TailLogEntriesRequest', -'TailLogEntriesResponse', -'UndeleteBucketRequest', -'UpdateBucketRequest', -'UpdateCmekSettingsRequest', -'UpdateExclusionRequest', -'UpdateLogMetricRequest', -'UpdateSettingsRequest', -'UpdateSinkRequest', -'UpdateViewRequest', -'WriteLogEntriesPartialErrors', -'WriteLogEntriesRequest', -'WriteLogEntriesResponse', + "BaseConfigServiceV2AsyncClient", + "BaseConfigServiceV2Client", + "BaseMetricsServiceV2AsyncClient", + "BaseMetricsServiceV2Client", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateLogMetricRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteLogMetricRequest", + "DeleteLogRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetLogMetricRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "IndexType", + "LifecycleState", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogEntry", + "LogEntryOperation", + 
"LogEntrySourceLocation", + "LogExclusion", + "LoggingServiceV2AsyncClient", + "LoggingServiceV2Client", + "LogMetric", + "LogSink", + "LogSplit", + "LogView", + "OperationState", + "Settings", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateLogMetricRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py index 189db0d9c3..6e9260e3df 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import BaseConfigServiceV2AsyncClient __all__ = ( - 'BaseConfigServiceV2Client', - 'BaseConfigServiceV2AsyncClient', + "BaseConfigServiceV2Client", + "BaseConfigServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index ef5a9151a1..24d254382f 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,7 +49,7 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -48,12 +59,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class BaseConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -188,12 +201,14 @@ def universe_domain(self) -> str: 
get_transport_class = BaseConfigServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base config service v2 async client. Args: @@ -248,31 +263,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - async def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsAsyncPager: + async def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. .. code-block:: python @@ -346,8 +363,7 @@ async def sample_list_buckets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -365,11 +381,13 @@ async def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -396,13 +414,14 @@ async def sample_list_buckets(): # Done; return the response. 
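The client-construction hunk above reshapes a DEBUG-gated structured log record: the extra mapping carries credential details only when the transport actually has credentials attached. A trimmed sketch reusing the same field names; the handler setup and the fallback branch shown are illustrative:

import logging as std_logging

std_logging.basicConfig(level=std_logging.DEBUG)
_LOGGER = std_logging.getLogger(__name__)

# The record (and any credential introspection feeding it) is built only when
# DEBUG is enabled, so the check costs nothing in production.
if _LOGGER.isEnabledFor(std_logging.DEBUG):
    _LOGGER.debug(
        "Created client `google.logging_v2.BaseConfigServiceV2AsyncClient`.",
        extra={
            "serviceName": "google.logging.v2.ConfigServiceV2",
            "credentialsType": None,  # fallback branch: no credentials attached
        },
    )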
return response - async def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -460,11 +479,13 @@ async def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -480,13 +501,14 @@ async def sample_get_bucket(): # Done; return the response. return response - async def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -555,11 +577,13 @@ async def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -583,13 +607,14 @@ async def sample_create_bucket_async(): # Done; return the response. return response - async def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -660,11 +685,13 @@ async def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -688,13 +715,14 @@ async def sample_update_bucket_async(): # Done; return the response. return response - async def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -755,11 +783,13 @@ async def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -775,13 +805,14 @@ async def sample_create_bucket(): # Done; return the response. return response - async def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + async def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -845,11 +876,13 @@ async def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -865,13 +898,14 @@ async def sample_update_bucket(): # Done; return the response. return response - async def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -925,11 +959,13 @@ async def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -942,13 +978,14 @@ async def sample_delete_bucket(): metadata=metadata, ) - async def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -999,11 +1036,13 @@ async def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1016,14 +1055,15 @@ async def sample_undelete_bucket(): metadata=metadata, ) - async def _list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsAsyncPager: + async def _list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1089,8 +1129,7 @@ async def sample_list_views(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1108,11 +1147,13 @@ async def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1139,13 +1180,14 @@ async def sample_list_views(): # Done; return the response. 
return response - async def _get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1203,11 +1245,13 @@ async def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1223,13 +1267,14 @@ async def sample_get_view(): # Done; return the response. return response - async def _create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1289,11 +1334,13 @@ async def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1309,13 +1356,14 @@ async def sample_create_view(): # Done; return the response. return response - async def _update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + async def _update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1377,11 +1425,13 @@ async def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1397,13 +1447,14 @@ async def sample_update_view(): # Done; return the response. 
return response - async def _delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1455,11 +1506,13 @@ async def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1472,14 +1525,15 @@ async def sample_delete_view(): metadata=metadata, ) - async def _list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksAsyncPager: + async def _list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. .. code-block:: python @@ -1548,8 +1602,7 @@ async def sample_list_sinks(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1567,11 +1620,13 @@ async def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1598,14 +1653,15 @@ async def sample_list_sinks(): # Done; return the response. 
return response - async def _get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -1681,8 +1737,7 @@ async def sample_get_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1700,11 +1755,13 @@ async def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1720,15 +1777,16 @@ async def sample_get_sink(): # Done; return the response. return response - async def _create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -1820,8 +1878,7 @@ async def sample_create_sink(): flattened_params = [parent, sink] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1841,11 +1898,13 @@ async def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1861,16 +1920,17 @@ async def sample_create_sink(): # Done; return the response. return response - async def _update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + async def _update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -1986,8 +2046,7 @@ async def sample_update_sink(): flattened_params = [sink_name, sink, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2009,11 +2068,13 @@ async def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2029,14 +2090,15 @@ async def sample_update_sink(): # Done; return the response. return response - async def _delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
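[Editor's annotation] Every metadata-header hunk in this file applies the same guard pattern: the hand-shaped routing-header tuple is fenced off so the formatter leaves its nesting intact. A minimal sketch of the convention (Black, and Black-compatible formatters, skip the region between the two markers; the `sink_name` field shown is just one of the routing keys that appear in these hunks):

    # fmt: off
    # Black does not reformat anything between "fmt: off" and "fmt: on",
    # so the nested tuple keeps its hand-rolled shape.
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("sink_name", request.sink_name),
        )),
    )
    # fmt: on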
@@ -2098,8 +2160,7 @@ async def sample_delete_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2117,11 +2178,13 @@ async def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2134,16 +2197,17 @@ async def sample_delete_sink(): metadata=metadata, ) - async def _create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2233,8 +2297,7 @@ async def sample_create_link(): flattened_params = [parent, link, link_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2256,11 +2319,13 @@ async def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2284,14 +2349,15 @@ async def sample_create_link(): # Done; return the response. 
return response - async def _delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2369,8 +2435,7 @@ async def sample_delete_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2388,11 +2453,13 @@ async def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2416,14 +2483,15 @@ async def sample_delete_link(): # Done; return the response. return response - async def _list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksAsyncPager: + async def _list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksAsyncPager: r"""Lists links. .. code-block:: python @@ -2491,8 +2559,7 @@ async def sample_list_links(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2510,11 +2577,13 @@ async def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -2541,14 +2610,15 @@ async def sample_list_links(): # Done; return the response. return response - async def _get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + async def _get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -2611,8 +2681,7 @@ async def sample_get_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2630,11 +2699,13 @@ async def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2650,14 +2721,15 @@ async def sample_get_link(): # Done; return the response. return response - async def _list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsAsyncPager: + async def _list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2727,8 +2799,7 @@ async def sample_list_exclusions(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2746,11 +2817,13 @@ async def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -2777,14 +2850,15 @@ async def sample_list_exclusions(): # Done; return the response. return response - async def _get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -2858,8 +2932,7 @@ async def sample_get_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2877,11 +2950,13 @@ async def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -2897,15 +2972,16 @@ async def sample_get_exclusion(): # Done; return the response. return response - async def _create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. 
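[Editor's annotation] The ValueError hunks repeated throughout this file fold an implicitly concatenated pair of string literals into a single literal, which keeps the full message greppable at the cost of line length. A minimal before/after sketch:

    # Before: the message is split across two adjacent literals,
    # joined by Python's implicit string concatenation.
    raise ValueError("If the `request` argument is set, then none of "
                     "the individual field arguments should be set.")

    # After: one literal on one line.
    raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")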
@@ -2996,8 +3072,7 @@ async def sample_create_exclusion(): flattened_params = [parent, exclusion] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3017,11 +3092,13 @@ async def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3037,16 +3114,17 @@ async def sample_create_exclusion(): # Done; return the response. return response - async def _update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + async def _update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3148,8 +3226,7 @@ async def sample_update_exclusion(): flattened_params = [name, exclusion, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3171,11 +3248,13 @@ async def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3191,14 +3270,15 @@ async def sample_update_exclusion(): # Done; return the response. 
return response - async def _delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. code-block:: python @@ -3259,8 +3339,7 @@ async def sample_delete_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3278,11 +3357,13 @@ async def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3295,13 +3376,14 @@ async def sample_delete_exclusion(): metadata=metadata, ) - async def _get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def _get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3383,11 +3465,13 @@ async def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3403,13 +3487,14 @@ async def sample_get_cmek_settings(): # Done; return the response. 
return response - async def _update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + async def _update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3496,11 +3581,13 @@ async def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3516,14 +3603,15 @@ async def sample_update_cmek_settings(): # Done; return the response. return response - async def _get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def _get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -3615,8 +3703,7 @@ async def sample_get_settings(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3634,11 +3721,13 @@ async def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3654,15 +3743,16 @@ async def sample_get_settings(): # Done; return the response. 
return response - async def _update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + async def _update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -3761,8 +3851,7 @@ async def sample_update_settings(): flattened_params = [settings, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3782,11 +3871,13 @@ async def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -3802,13 +3893,14 @@ async def sample_update_settings(): # Done; return the response. return response - async def _copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def _copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3933,17 +4025,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -3985,17 +4078,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4040,16 +4134,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "BaseConfigServiceV2AsyncClient": return self @@ -4057,12 +4153,11 @@ async def __aenter__(self) -> "BaseConfigServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseConfigServiceV2AsyncClient", -) +__all__ = ("BaseConfigServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index 7889ba044b..34d212231b 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from 
google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,7 +65,7 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -68,13 +81,15 @@ class BaseConfigServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[ConfigServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -110,9 +125,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -121,16 +134,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -144,21 +156,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -193,7 +203,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): BaseConfigServiceV2Client: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -210,139 +221,236 @@ def transport(self) -> ConfigServiceV2Transport: return self._transport @staticmethod - def cmek_settings_path(project: str,) -> str: + def cmek_settings_path( + project: str, + ) -> str: """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project, ) + return "projects/{project}/cmekSettings".format( + project=project, + ) @staticmethod - def parse_cmek_settings_path(path: str) -> Dict[str,str]: + def parse_cmek_settings_path( + path: str, + ) -> Dict[str, str]: """Parses a cmek_settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} @staticmethod - def link_path(project: str,location: str,bucket: str,link: str,) -> str: + def link_path( + project: str, + location: str, + bucket: str, + link: str, + ) -> str: """Returns a fully-qualified link string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format( + project=project, + location=location, + bucket=bucket, + link=link, + ) @staticmethod - def parse_link_path(path: str) -> Dict[str,str]: + def parse_link_path( + path: str, + ) -> Dict[str, str]: """Parses a link path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_bucket_path(project: str,location: str,bucket: str,) -> str: + def log_bucket_path( + project: str, + location: str, + bucket: str, + ) -> str: """Returns a fully-qualified log_bucket string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + return "projects/{project}/locations/{location}/buckets/{bucket}".format( + project=project, + location=location, + bucket=bucket, + ) @staticmethod - def parse_log_bucket_path(path: str) -> Dict[str,str]: + def parse_log_bucket_path( + path: str, + ) -> Dict[str, str]: """Parses a log_bucket path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_exclusion_path(project: str,exclusion: str,) -> str: + def log_exclusion_path( + project: str, + exclusion: str, + ) -> str: """Returns a fully-qualified log_exclusion 
string.""" - return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + return "projects/{project}/exclusions/{exclusion}".format( + project=project, + exclusion=exclusion, + ) @staticmethod - def parse_log_exclusion_path(path: str) -> Dict[str,str]: + def parse_log_exclusion_path( + path: str, + ) -> Dict[str, str]: """Parses a log_exclusion path into its component segments.""" m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_sink_path(project: str,sink: str,) -> str: + def log_sink_path( + project: str, + sink: str, + ) -> str: """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + return "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) @staticmethod - def parse_log_sink_path(path: str) -> Dict[str,str]: + def parse_log_sink_path( + path: str, + ) -> Dict[str, str]: """Parses a log_sink path into its component segments.""" m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + def log_view_path( + project: str, + location: str, + bucket: str, + view: str, + ) -> str: """Returns a fully-qualified log_view string.""" - return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) @staticmethod - def parse_log_view_path(path: str) -> Dict[str,str]: + def parse_log_view_path( + path: str, + ) -> Dict[str, str]: """Parses a log_view path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def settings_path(project: str,) -> str: + def settings_path( + project: str, + ) -> str: """Returns a fully-qualified settings string.""" - return "projects/{project}/settings".format(project=project, ) + return "projects/{project}/settings".format( + project=project, + ) @staticmethod - def parse_settings_path(path: str) -> Dict[str,str]: + def parse_settings_path( + path: str, + ) -> Dict[str, str]: """Parses a settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/settings$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - 
def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -374,8 +482,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = BaseConfigServiceV2Client._use_client_cert_effective() @@ -505,7 +615,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors.
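# Illustrative round trip (a sketch, not part of the patch) for the path
# helpers above, written with the `(?P<...>` group names that the regex
# syntax requires:
import re

def log_bucket_path(project: str, location: str, bucket: str) -> str:
    """Build a fully-qualified log bucket resource name."""
    return "projects/{project}/locations/{location}/buckets/{bucket}".format(
        project=project, location=location, bucket=bucket,
    )

def parse_log_bucket_path(path: str) -> dict:
    """Invert log_bucket_path, returning {} when the path does not match."""
    m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/buckets/(?P<bucket>.+?)$", path)
    return m.groupdict() if m else {}

path = log_bucket_path("my-project", "global", "my-bucket")
assert parse_log_bucket_path(path) == {"project": "my-project", "location": "global", "bucket": "my-bucket"}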
@@ -545,12 +655,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base config service v2 client. Args: @@ -607,12 +719,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseConfigServiceV2Client._read_environment_variables() self._client_cert_source = BaseConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = BaseConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -632,22 +744,22 @@ def __init__(self, *, if transport_provided: # transport is a ConfigServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
+ "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(ConfigServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - BaseConfigServiceV2Client._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or BaseConfigServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -677,25 +789,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.ConfigServiceV2", "credentialsType": None, - } + }, ) - def list_buckets(self, - request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBucketsPager: + def list_buckets( + self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsPager: r"""Lists log buckets. .. code-block:: python @@ -769,8 +884,7 @@ def sample_list_buckets(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -787,11 +901,13 @@ def sample_list_buckets(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -818,13 +934,14 @@ def sample_list_buckets(): # Done; return the response. 
return response - def get_bucket(self, - request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def get_bucket( + self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Gets a log bucket. .. code-block:: python @@ -882,11 +999,13 @@ def sample_get_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -902,13 +1021,14 @@ def sample_get_bucket(): # Done; return the response. return response - def create_bucket_async(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_bucket_async( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. After a bucket has been created, the bucket's location @@ -977,11 +1097,13 @@ def sample_create_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1005,13 +1127,14 @@ def sample_create_bucket_async(): # Done; return the response. return response - def update_bucket_async(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_bucket_async( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates a log bucket asynchronously. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1082,11 +1205,13 @@ def sample_update_bucket_async(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1110,13 +1235,14 @@ def sample_update_bucket_async(): # Done; return the response. 
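# `create_bucket_async` and `update_bucket_async` above return a long-running
# operation handle rather than the bucket itself. A sketch of the calling
# pattern, with placeholder resource names and retention:
from google.cloud import logging_v2
from google.cloud.logging_v2.types import logging_config

client = logging_v2.ConfigServiceV2Client()
op = client.create_bucket_async(
    request=logging_config.CreateBucketRequest(
        parent="projects/my-project/locations/global",
        bucket_id="my-bucket",
        bucket=logging_config.LogBucket(retention_days=30),
    )
)
bucket = op.result(timeout=300)  # blocks until the operation completes or raises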
return response - def create_bucket(self, - request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def create_bucket( + self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's location cannot be changed. @@ -1177,11 +1303,13 @@ def sample_create_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1197,13 +1325,14 @@ def sample_create_bucket(): # Done; return the response. return response - def update_bucket(self, - request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogBucket: + def update_bucket( + self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: r"""Updates a log bucket. If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, @@ -1267,11 +1396,13 @@ def sample_update_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1287,13 +1418,14 @@ def sample_update_bucket(): # Done; return the response. return response - def delete_bucket(self, - request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_bucket( + self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a log bucket. Changes the bucket's ``lifecycle_state`` to the @@ -1347,11 +1479,13 @@ def sample_delete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. 
self._validate_universe_domain() @@ -1364,13 +1498,14 @@ def sample_delete_bucket(): metadata=metadata, ) - def undelete_bucket(self, - request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def undelete_bucket( + self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 days. @@ -1421,11 +1556,13 @@ def sample_undelete_bucket(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1438,14 +1575,15 @@ def sample_undelete_bucket(): metadata=metadata, ) - def _list_views(self, - request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListViewsPager: + def _list_views( + self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1511,8 +1649,7 @@ def sample_list_views(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1529,11 +1666,13 @@ def sample_list_views(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1560,13 +1699,14 @@ def sample_list_views(): # Done; return the response. 
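# As the docstrings above note, `delete_bucket` only flips the bucket's
# `lifecycle_state` to DELETE_REQUESTED, and `undelete_bucket` can restore it
# within the 7-day grace period. Sketch with a placeholder resource name:
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
name = "projects/my-project/locations/global/buckets/my-bucket"
client.delete_bucket(request={"name": name})
client.undelete_bucket(request={"name": name})  # valid only during the grace period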
return response - def _get_view(self, - request: Optional[Union[logging_config.GetViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _get_view( + self, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Gets a view on a log bucket.. .. code-block:: python @@ -1624,11 +1764,13 @@ def sample_get_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1644,13 +1786,14 @@ def sample_get_view(): # Done; return the response. return response - def _create_view(self, - request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _create_view( + self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1710,11 +1853,13 @@ def sample_create_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1730,13 +1875,14 @@ def sample_create_view(): # Done; return the response. return response - def _update_view(self, - request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogView: + def _update_view( + self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new view: ``filter``. If an ``UNAVAILABLE`` error is returned, this @@ -1798,11 +1944,13 @@ def sample_update_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1818,13 +1966,14 @@ def sample_update_view(): # Done; return the response. 
return response - def _delete_view(self, - request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_view( + self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it can delete the view. If this occurs, please try again in a few @@ -1876,11 +2025,13 @@ def sample_delete_view(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1893,14 +2044,15 @@ def sample_delete_view(): metadata=metadata, ) - def _list_sinks(self, - request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListSinksPager: + def _list_sinks( + self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSinksPager: r"""Lists sinks. .. code-block:: python @@ -1969,8 +2121,7 @@ def sample_list_sinks(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1987,11 +2138,13 @@ def sample_list_sinks(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2018,14 +2171,15 @@ def sample_list_sinks(): # Done; return the response. 
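# The `# fmt: off` blocks above all build the same thing: an
# `x-goog-request-params` metadata entry derived from request fields, which
# the backend uses for request routing. A sketch using google.api_core
# directly, with a placeholder parent value:
from google.api_core import gapic_v1

md = gapic_v1.routing_header.to_grpc_metadata(
    (("parent", "projects/my-project/locations/global/buckets/my-bucket"),)
)
# md is a ("x-goog-request-params", "parent=<percent-encoded value>") pair,
# appended to the user-supplied metadata tuple before the RPC is sent.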
return response - def _get_sink(self, - request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _get_sink( + self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Gets a sink. .. code-block:: python @@ -2101,8 +2255,7 @@ def sample_get_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2119,11 +2272,13 @@ def sample_get_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2139,15 +2294,16 @@ def sample_get_sink(): # Done; return the response. return response - def _create_sink(self, - request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _create_sink( + self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's ``writer_identity`` is not @@ -2239,8 +2395,7 @@ def sample_create_sink(): flattened_params = [parent, sink] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2259,11 +2414,13 @@ def sample_create_sink(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2279,16 +2436,17 @@ def sample_create_sink(): # Done; return the response. return response - def _update_sink(self, - request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - sink: Optional[logging_config.LogSink] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogSink: + def _update_sink( + self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and ``filter``. @@ -2404,8 +2562,7 @@ def sample_update_sink(): flattened_params = [sink_name, sink, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2426,11 +2583,13 @@ def sample_update_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2446,14 +2605,15 @@ def sample_update_sink(): # Done; return the response. return response - def _delete_sink(self, - request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, - *, - sink_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_sink( + self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. 
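# The guard repeated before each RPC above enforces that callers pass either
# a complete `request` object or individual flattened fields, never both. A
# self-contained sketch of the same check:
from typing import Optional

def get_sink(request: Optional[dict] = None, *, sink_name: Optional[str] = None) -> dict:
    flattened_params = [sink_name]
    has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
    if request is not None and has_flattened_params:
        raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")
    # Fall back to building the request from the flattened field.
    return request or {"sink_name": sink_name}

assert get_sink(sink_name="projects/p/sinks/s") == {"sink_name": "projects/p/sinks/s"}
# get_sink(request={"sink_name": "x"}, sink_name="y") would raise ValueError.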
@@ -2515,8 +2675,7 @@ def sample_delete_sink(): flattened_params = [sink_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2533,11 +2692,13 @@ def sample_delete_sink(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("sink_name", request.sink_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2550,16 +2711,17 @@ def sample_delete_sink(): metadata=metadata, ) - def _create_link(self, - request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, - *, - parent: Optional[str] = None, - link: Optional[logging_config.Link] = None, - link_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _create_link( + self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs stored in the log bucket. A log bucket may currently @@ -2649,8 +2811,7 @@ def sample_create_link(): flattened_params = [parent, link, link_id] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2671,11 +2832,13 @@ def sample_create_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2699,14 +2862,15 @@ def sample_create_link(): # Done; return the response. 
return response - def _delete_link(self, - request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _delete_link( + self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2784,8 +2948,7 @@ def sample_delete_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2802,11 +2965,13 @@ def sample_delete_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2830,14 +2995,15 @@ def sample_delete_link(): # Done; return the response. return response - def _list_links(self, - request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLinksPager: + def _list_links( + self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLinksPager: r"""Lists links. .. code-block:: python @@ -2905,8 +3071,7 @@ def sample_list_links(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2923,11 +3088,13 @@ def sample_list_links(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2954,14 +3121,15 @@ def sample_list_links(): # Done; return the response. 
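# `_delete_link` above is also long-running: assuming its operation response
# type is `google.protobuf.empty_pb2.Empty` (as for other delete LROs in this
# service), the caller simply waits for completion. Sketch with placeholder
# names; the public surface exposes this as `delete_link`, while this
# internal golden prefixes it with an underscore:
from google.cloud import logging_v2

client = logging_v2.ConfigServiceV2Client()
op = client.delete_link(name="projects/my-project/locations/global/buckets/my-bucket/links/my-link")
op.result()  # returns Empty on success, raises GoogleAPICallError on failure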
return response - def _get_link(self, - request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Link: + def _get_link( + self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: r"""Gets a link. .. code-block:: python @@ -3024,8 +3192,7 @@ def sample_get_link(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3042,11 +3209,13 @@ def sample_get_link(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3062,14 +3231,15 @@ def sample_get_link(): # Done; return the response. return response - def _list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListExclusionsPager: + def _list_exclusions( + self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -3139,8 +3309,7 @@ def sample_list_exclusions(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3157,11 +3326,13 @@ def sample_list_exclusions(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3188,14 +3359,15 @@ def sample_list_exclusions(): # Done; return the response. 
return response - def _get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _get_exclusion( + self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. .. code-block:: python @@ -3269,8 +3441,7 @@ def sample_get_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3287,11 +3458,13 @@ def sample_get_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3307,15 +3480,16 @@ def sample_get_exclusion(): # Done; return the response. return response - def _create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, - *, - parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _create_exclusion( + self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. @@ -3406,8 +3580,7 @@ def sample_create_exclusion(): flattened_params = [parent, exclusion] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3426,11 +3599,13 @@ def sample_create_exclusion(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3446,16 +3621,17 @@ def sample_create_exclusion(): # Done; return the response. return response - def _update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.LogExclusion: + def _update_exclusion( + self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3557,8 +3733,7 @@ def sample_update_exclusion(): flattened_params = [name, exclusion, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3579,11 +3754,13 @@ def sample_update_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3599,14 +3776,15 @@ def sample_update_exclusion(): # Done; return the response. return response - def _delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_exclusion( + self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes an exclusion in the \_Default sink. .. 
code-block:: python @@ -3667,8 +3845,7 @@ def sample_delete_exclusion(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -3685,11 +3862,13 @@ def sample_delete_exclusion(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3702,13 +3881,14 @@ def sample_delete_exclusion(): metadata=metadata, ) - def _get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def _get_cmek_settings( + self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. Note: CMEK for the Log Router can be configured for Google Cloud @@ -3790,11 +3970,13 @@ def sample_get_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3810,13 +3992,14 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def _update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.CmekSettings: + def _update_cmek_settings( + self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. Note: CMEK for the Log Router can currently only be configured @@ -3903,11 +4086,13 @@ def sample_update_cmek_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -3923,14 +4108,15 @@ def sample_update_cmek_settings(): # Done; return the response. 
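# `_get_cmek_settings` / `_update_cmek_settings` above take a `name` of the
# form "<parent>/cmekSettings", typically at the organization level where Log
# Router CMEK is configured. Illustrative request construction with a
# placeholder organization number:
from google.cloud.logging_v2.types import logging_config

req = logging_config.GetCmekSettingsRequest(name="organizations/123456789/cmekSettings")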
return response - def _get_settings(self, - request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def _get_settings( + self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. Note: Settings for the Log Router can be get for Google Cloud @@ -4022,8 +4208,7 @@ def sample_get_settings(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4040,11 +4225,13 @@ def sample_get_settings(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -4060,15 +4247,16 @@ def sample_get_settings(): # Done; return the response. return response - def _update_settings(self, - request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, - *, - settings: Optional[logging_config.Settings] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_config.Settings: + def _update_settings( + self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. Note: Settings for the Log Router can currently only be @@ -4167,8 +4355,7 @@ def sample_update_settings(): flattened_params = [settings, update_mask] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -4187,11 +4374,13 @@ def sample_update_settings(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -4207,13 +4396,14 @@ def sample_update_settings(): # Done; return the response. return response - def _copy_log_entries(self, - request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def _copy_log_entries( + self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4351,10 +4541,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -4362,7 +4549,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4407,10 +4598,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -4418,7 +4606,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -4466,21 +4658,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
-        rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
-
-
-
-
-
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
 
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
 
@@ -4488,6 +4677,4 @@ def cancel_operation(
 if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
     DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
 
-__all__ = (
-    "BaseConfigServiceV2Client",
-)
+__all__ = ("BaseConfigServiceV2Client",)
diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py
index 1af6b54c99..6ef08a181b 100755
--- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py
+++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py
@@ -17,6 +17,7 @@
 from google.api_core import retry as retries
 from google.api_core import retry_async as retries_async
 from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
+
 try:
     OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
     OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
@@ -44,14 +45,17 @@ class ListBucketsPager:
     attributes are available on the pager. If multiple requests are made, only
     the most recent response is retained, and thus used for attribute lookup.
     """
-    def __init__(self,
-            method: Callable[..., logging_config.ListBucketsResponse],
-            request: logging_config.ListBucketsRequest,
-            response: logging_config.ListBucketsResponse,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
+
+    def __init__(
+        self,
+        method: Callable[..., logging_config.ListBucketsResponse],
+        request: logging_config.ListBucketsRequest,
+        response: logging_config.ListBucketsResponse,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
+    ):
         """Instantiate the pager.
 
         Args:
@@ -92,7 +96,7 @@ def __iter__(self) -> Iterator[logging_config.LogBucket]:
             yield from page.buckets
 
     def __repr__(self) -> str:
-        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
 
 
 class ListBucketsAsyncPager:
@@ -112,14 +116,17 @@ class ListBucketsAsyncPager:
     attributes are available on the pager. If multiple requests are made, only
     the most recent response is retained, and thus used for attribute lookup.
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], - request: logging_config.ListBucketsRequest, - response: logging_config.ListBucketsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: @@ -163,7 +171,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsPager: @@ -183,14 +191,17 @@ class ListViewsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListViewsResponse], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -231,7 +242,7 @@ def __iter__(self) -> Iterator[logging_config.LogView]: yield from page.views def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListViewsAsyncPager: @@ -251,14 +262,17 @@ class ListViewsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListViewsResponse]], - request: logging_config.ListViewsRequest, - response: logging_config.ListViewsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -293,6 +307,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: @@ -302,7 +317,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksPager: @@ -322,14 +337,17 @@ class ListSinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListSinksResponse], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -370,7 +388,7 @@ def __iter__(self) -> Iterator[logging_config.LogSink]: yield from page.sinks def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListSinksAsyncPager: @@ -390,14 +408,17 @@ class ListSinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListSinksResponse]], - request: logging_config.ListSinksRequest, - response: logging_config.ListSinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -432,6 +453,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: @@ -441,7 +463,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksPager: @@ -461,14 +483,17 @@ class ListLinksPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListLinksResponse], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -509,7 +534,7 @@ def __iter__(self) -> Iterator[logging_config.Link]: yield from page.links def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLinksAsyncPager: @@ -529,14 +554,17 @@ class ListLinksAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListLinksResponse]], - request: logging_config.ListLinksRequest, - response: logging_config.ListLinksResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -571,6 +599,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.Link]: async def async_generator(): async for page in self.pages: @@ -580,7 +609,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsPager: @@ -600,14 +629,17 @@ class ListExclusionsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_config.ListExclusionsResponse], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -648,7 +680,7 @@ def __iter__(self) -> Iterator[logging_config.LogExclusion]: yield from page.exclusions def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListExclusionsAsyncPager: @@ -668,14 +700,17 @@ class ListExclusionsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], - request: logging_config.ListExclusionsRequest, - response: logging_config.ListExclusionsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -710,6 +745,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: @@ -719,4 +755,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index cc3da21c11..6f8979ef81 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. 
 _transport_registry = OrderedDict()  # type: Dict[str, Type[ConfigServiceV2Transport]]
-_transport_registry['grpc'] = ConfigServiceV2GrpcTransport
-_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport
+_transport_registry["grpc"] = ConfigServiceV2GrpcTransport
+_transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport
 
 __all__ = (
-    'ConfigServiceV2Transport',
-    'ConfigServiceV2GrpcTransport',
-    'ConfigServiceV2GrpcAsyncIOTransport',
+    "ConfigServiceV2Transport",
+    "ConfigServiceV2GrpcTransport",
+    "ConfigServiceV2GrpcAsyncIOTransport",
 )
diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py
index 625a388594..1ea14ce3a1 100755
--- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py
+++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py
@@ -25,11 +25,11 @@
 from google.api_core import retry as retries
 from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account # type: ignore
+from google.oauth2 import service_account  # type: ignore
 import google.protobuf
 
 from google.cloud.logging_v2.types import logging_config
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
@@ -41,32 +41,35 @@
 class ConfigServiceV2Transport(abc.ABC):
     """Abstract transport class for ConfigServiceV2."""
 
+    # fmt: off
     AUTH_SCOPES = (
-        'https://www.googleapis.com/auth/cloud-platform',
-        'https://www.googleapis.com/auth/cloud-platform.read-only',
-        'https://www.googleapis.com/auth/logging.admin',
-        'https://www.googleapis.com/auth/logging.read',
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/cloud-platform.read-only",
+        "https://www.googleapis.com/auth/logging.admin",
+        "https://www.googleapis.com/auth/logging.read",
     )
+    # fmt: on
 
-    DEFAULT_HOST: str = 'logging.googleapis.com'
+    DEFAULT_HOST: str = "logging.googleapis.com"
 
     def __init__(
-            self, *,
-            host: str = DEFAULT_HOST,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            **kwargs,
-            ) -> None:
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
         """Instantiate the transport.
 
         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'logging.googleapis.com').
+                 The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests.
These credentials identify the application to the service; if none @@ -102,10 +105,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -113,15 +116,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -383,12 +390,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ @@ -399,293 +406,453 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + # fmt: off @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - Union[ - logging_config.ListBucketsResponse, - Awaitable[logging_config.ListBucketsResponse] - ]]: + def list_buckets( + self, + ) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def get_bucket( + self, + ) -> Callable[ + [logging_config.GetBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_bucket_async( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_bucket_async( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def 
create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def create_bucket( + self, + ) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - Union[ - logging_config.LogBucket, - Awaitable[logging_config.LogBucket] - ]]: + def update_bucket( + self, + ) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_bucket( + self, + ) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def undelete_bucket( + self, + ) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - Union[ - logging_config.ListViewsResponse, - Awaitable[logging_config.ListViewsResponse] - ]]: + def list_views( + self, + ) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def get_view( + self, + ) -> Callable[ + [logging_config.GetViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def create_view( + self, + ) -> Callable[ + [logging_config.CreateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - Union[ - logging_config.LogView, - Awaitable[logging_config.LogView] - ]]: + def update_view( + self, + ) -> Callable[ + [logging_config.UpdateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_view( + self, + ) -> Callable[ + [logging_config.DeleteViewRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - Union[ - logging_config.ListSinksResponse, - 
Awaitable[logging_config.ListSinksResponse] - ]]: + def list_sinks( + self, + ) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def get_sink( + self, + ) -> Callable[ + [logging_config.GetSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def create_sink( + self, + ) -> Callable[ + [logging_config.CreateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - Union[ - logging_config.LogSink, - Awaitable[logging_config.LogSink] - ]]: + def update_sink( + self, + ) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_sink( + self, + ) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_link(self) -> Callable[ - [logging_config.CreateLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_link( + self, + ) -> Callable[ + [logging_config.CreateLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_link(self) -> Callable[ - [logging_config.DeleteLinkRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_link( + self, + ) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_links(self) -> Callable[ - [logging_config.ListLinksRequest], - Union[ - logging_config.ListLinksResponse, - Awaitable[logging_config.ListLinksResponse] - ]]: + def list_links( + self, + ) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_link(self) -> Callable[ - [logging_config.GetLinkRequest], - Union[ - logging_config.Link, - Awaitable[logging_config.Link] - ]]: + def get_link( + self, + ) -> Callable[ + [logging_config.GetLinkRequest], + Union[ + logging_config.Link, + Awaitable[logging_config.Link] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - Union[ - logging_config.ListExclusionsResponse, - Awaitable[logging_config.ListExclusionsResponse] - ]]: + def list_exclusions( + self, + ) -> Callable[ + 
[logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def get_exclusion( + self, + ) -> Callable[ + [logging_config.GetExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def create_exclusion( + self, + ) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - Union[ - logging_config.LogExclusion, - Awaitable[logging_config.LogExclusion] - ]]: + def update_exclusion( + self, + ) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_exclusion( + self, + ) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: + def get_cmek_settings( + self, + ) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - Union[ - logging_config.CmekSettings, - Awaitable[logging_config.CmekSettings] - ]]: + def update_cmek_settings( + self, + ) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_settings(self) -> Callable[ - [logging_config.GetSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_settings(self) -> Callable[ - [logging_config.UpdateSettingsRequest], - Union[ - logging_config.Settings, - Awaitable[logging_config.Settings] - ]]: + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Union[ 
- operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -699,19 +866,13 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property @@ -719,6 +880,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'ConfigServiceV2Transport', -) +__all__ = ("ConfigServiceV2Transport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index ac3a4393a8..d911a43a62 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -22,7 +22,7 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -32,12 +32,13 @@ import proto # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -57,10 +58,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -68,7 +71,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -94,7 +97,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.ConfigServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -116,28 +119,31 @@ class 
ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -212,7 +218,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -221,7 +228,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -256,19 +264,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -304,13 +314,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -322,17 +331,15 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: + def list_buckets( + self, + ) -> Callable[[logging_config.ListBucketsRequest], logging_config.ListBucketsResponse]: r"""Return a callable for the list buckets method over gRPC. Lists log buckets. @@ -347,18 +354,18 @@ def list_buckets(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListBuckets', + if "list_buckets" not in self._stubs: + self._stubs["list_buckets"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListBuckets", request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, ) - return self._stubs['list_buckets'] + return self._stubs["list_buckets"] @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: + def get_bucket( + self, + ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. Gets a log bucket. @@ -373,18 +380,18 @@ def get_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetBucket', + if "get_bucket" not in self._stubs: + self._stubs["get_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetBucket", request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['get_bucket'] + return self._stubs["get_bucket"] @property - def create_bucket_async(self) -> Callable[ - [logging_config.CreateBucketRequest], - operations_pb2.Operation]: + def create_bucket_async( + self, + ) -> Callable[[logging_config.CreateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the create bucket async method over gRPC. Creates a log bucket asynchronously that can be used @@ -402,18 +409,18 @@ def create_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + if "create_bucket_async" not in self._stubs: + self._stubs["create_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucketAsync", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_bucket_async'] + return self._stubs["create_bucket_async"] @property - def update_bucket_async(self) -> Callable[ - [logging_config.UpdateBucketRequest], - operations_pb2.Operation]: + def update_bucket_async( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], operations_pb2.Operation]: r"""Return a callable for the update bucket async method over gRPC. Updates a log bucket asynchronously. @@ -434,18 +441,18 @@ def update_bucket_async(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + if "update_bucket_async" not in self._stubs: + self._stubs["update_bucket_async"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_bucket_async'] + return self._stubs["update_bucket_async"] @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: + def create_bucket( + self, + ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. Creates a log bucket that can be used to store log @@ -462,18 +469,18 @@ def create_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateBucket', + if "create_bucket" not in self._stubs: + self._stubs["create_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateBucket", request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['create_bucket'] + return self._stubs["create_bucket"] @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: + def update_bucket( + self, + ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. Updates a log bucket. @@ -494,18 +501,18 @@ def update_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateBucket', + if "update_bucket" not in self._stubs: + self._stubs["update_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateBucket", request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, ) - return self._stubs['update_bucket'] + return self._stubs["update_bucket"] @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: + def delete_bucket( + self, + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a log bucket. @@ -525,18 +532,18 @@ def delete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteBucket', + if "delete_bucket" not in self._stubs: + self._stubs["delete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_bucket'] + return self._stubs["delete_bucket"] @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: + def undelete_bucket( + self, + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a log bucket. A bucket that has been @@ -553,18 +560,18 @@ def undelete_bucket(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + if "undelete_bucket" not in self._stubs: + self._stubs["undelete_bucket"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['undelete_bucket'] + return self._stubs["undelete_bucket"] @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: + def list_views( + self, + ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. Lists views on a log bucket. @@ -579,18 +586,18 @@ def list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_views' not in self._stubs: - self._stubs['list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/ListViews', + if "list_views" not in self._stubs: + self._stubs["list_views"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/ListViews", request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['list_views'] + return self._stubs["list_views"] @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: + def get_view( + self, + ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. Gets a view on a log bucket.. @@ -605,18 +612,18 @@ def get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_view' not in self._stubs: - self._stubs['get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/GetView', + if "get_view" not in self._stubs: + self._stubs["get_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetView", request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['get_view'] + return self._stubs["get_view"] @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: + def create_view( + self, + ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. Creates a view over log entries in a log bucket. A @@ -632,18 +639,18 @@ def create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_view' not in self._stubs: - self._stubs['create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CreateView', + if "create_view" not in self._stubs: + self._stubs["create_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CreateView", request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['create_view'] + return self._stubs["create_view"] @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: + def update_view( + self, + ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. Updates a view on a log bucket. This method replaces the @@ -662,18 +669,18 @@ def update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_view' not in self._stubs: - self._stubs['update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateView', + if "update_view" not in self._stubs: + self._stubs["update_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateView", request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['update_view'] + return self._stubs["update_view"] @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is @@ -691,18 +698,18 @@ def delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/DeleteView', + if "delete_view" not in self._stubs: + self._stubs["delete_view"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_view'] + return self._stubs["delete_view"] @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: + def list_sinks( + self, + ) -> Callable[[logging_config.ListSinksRequest], logging_config.ListSinksResponse]: r"""Return a callable for the list sinks method over gRPC. Lists sinks. @@ -717,18 +724,18 @@ def list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
-        if 'list_sinks' not in self._stubs:
-            self._stubs['list_sinks'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListSinks',
+        if "list_sinks" not in self._stubs:
+            self._stubs["list_sinks"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListSinks",
                 request_serializer=logging_config.ListSinksRequest.serialize,
                 response_deserializer=logging_config.ListSinksResponse.deserialize,
             )
-        return self._stubs['list_sinks']
+        return self._stubs["list_sinks"]
 
     @property
-    def get_sink(self) -> Callable[
-            [logging_config.GetSinkRequest],
-            logging_config.LogSink]:
+    def get_sink(
+        self,
+    ) -> Callable[[logging_config.GetSinkRequest], logging_config.LogSink]:
         r"""Return a callable for the get sink method over gRPC.
 
         Gets a sink.
@@ -743,18 +750,18 @@ def get_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_sink' not in self._stubs:
-            self._stubs['get_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetSink',
+        if "get_sink" not in self._stubs:
+            self._stubs["get_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetSink",
                 request_serializer=logging_config.GetSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['get_sink']
+        return self._stubs["get_sink"]
 
     @property
-    def create_sink(self) -> Callable[
-            [logging_config.CreateSinkRequest],
-            logging_config.LogSink]:
+    def create_sink(
+        self,
+    ) -> Callable[[logging_config.CreateSinkRequest], logging_config.LogSink]:
         r"""Return a callable for the create sink method over gRPC.
 
         Creates a sink that exports specified log entries to a
@@ -773,18 +780,18 @@ def create_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_sink' not in self._stubs:
-            self._stubs['create_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateSink',
+        if "create_sink" not in self._stubs:
+            self._stubs["create_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateSink",
                 request_serializer=logging_config.CreateSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['create_sink']
+        return self._stubs["create_sink"]
 
     @property
-    def update_sink(self) -> Callable[
-            [logging_config.UpdateSinkRequest],
-            logging_config.LogSink]:
+    def update_sink(
+        self,
+    ) -> Callable[[logging_config.UpdateSinkRequest], logging_config.LogSink]:
         r"""Return a callable for the update sink method over gRPC.
 
         Updates a sink. This method replaces the following fields in the
@@ -804,18 +811,18 @@ def update_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_sink' not in self._stubs:
-            self._stubs['update_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateSink',
+        if "update_sink" not in self._stubs:
+            self._stubs["update_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateSink",
                 request_serializer=logging_config.UpdateSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['update_sink']
+        return self._stubs["update_sink"]
 
     @property
-    def delete_sink(self) -> Callable[
-            [logging_config.DeleteSinkRequest],
-            empty_pb2.Empty]:
+    def delete_sink(
+        self,
+    ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]:
         r"""Return a callable for the delete sink method over gRPC.
 
         Deletes a sink. If the sink has a unique ``writer_identity``,
@@ -831,18 +838,18 @@ def delete_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_sink' not in self._stubs:
-            self._stubs['delete_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteSink',
+        if "delete_sink" not in self._stubs:
+            self._stubs["delete_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteSink",
                 request_serializer=logging_config.DeleteSinkRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_sink']
+        return self._stubs["delete_sink"]
 
     @property
-    def create_link(self) -> Callable[
-            [logging_config.CreateLinkRequest],
-            operations_pb2.Operation]:
+    def create_link(
+        self,
+    ) -> Callable[[logging_config.CreateLinkRequest], operations_pb2.Operation]:
         r"""Return a callable for the create link method over gRPC.
 
         Asynchronously creates a linked dataset in BigQuery
@@ -860,18 +867,18 @@ def create_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_link' not in self._stubs:
-            self._stubs['create_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateLink',
+        if "create_link" not in self._stubs:
+            self._stubs["create_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateLink",
                 request_serializer=logging_config.CreateLinkRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['create_link']
+        return self._stubs["create_link"]
 
     @property
-    def delete_link(self) -> Callable[
-            [logging_config.DeleteLinkRequest],
-            operations_pb2.Operation]:
+    def delete_link(
+        self,
+    ) -> Callable[[logging_config.DeleteLinkRequest], operations_pb2.Operation]:
         r"""Return a callable for the delete link method over gRPC.
 
         Deletes a link. This will also delete the
@@ -887,18 +894,18 @@ def delete_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_link' not in self._stubs:
-            self._stubs['delete_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteLink',
+        if "delete_link" not in self._stubs:
+            self._stubs["delete_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteLink",
                 request_serializer=logging_config.DeleteLinkRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['delete_link']
+        return self._stubs["delete_link"]
 
     @property
-    def list_links(self) -> Callable[
-            [logging_config.ListLinksRequest],
-            logging_config.ListLinksResponse]:
+    def list_links(
+        self,
+    ) -> Callable[[logging_config.ListLinksRequest], logging_config.ListLinksResponse]:
         r"""Return a callable for the list links method over gRPC.
 
         Lists links.
@@ -913,18 +920,18 @@ def list_links(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_links' not in self._stubs:
-            self._stubs['list_links'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListLinks',
+        if "list_links" not in self._stubs:
+            self._stubs["list_links"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListLinks",
                 request_serializer=logging_config.ListLinksRequest.serialize,
                 response_deserializer=logging_config.ListLinksResponse.deserialize,
             )
-        return self._stubs['list_links']
+        return self._stubs["list_links"]
 
     @property
-    def get_link(self) -> Callable[
-            [logging_config.GetLinkRequest],
-            logging_config.Link]:
+    def get_link(
+        self,
+    ) -> Callable[[logging_config.GetLinkRequest], logging_config.Link]:
         r"""Return a callable for the get link method over gRPC.
 
         Gets a link.
@@ -939,18 +946,18 @@ def get_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_link' not in self._stubs:
-            self._stubs['get_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetLink',
+        if "get_link" not in self._stubs:
+            self._stubs["get_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetLink",
                 request_serializer=logging_config.GetLinkRequest.serialize,
                 response_deserializer=logging_config.Link.deserialize,
             )
-        return self._stubs['get_link']
+        return self._stubs["get_link"]
 
     @property
-    def list_exclusions(self) -> Callable[
-            [logging_config.ListExclusionsRequest],
-            logging_config.ListExclusionsResponse]:
+    def list_exclusions(
+        self,
+    ) -> Callable[[logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse]:
         r"""Return a callable for the list exclusions method over gRPC.
 
         Lists all the exclusions on the \_Default sink in a parent
@@ -966,18 +973,18 @@ def list_exclusions(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_exclusions' not in self._stubs:
-            self._stubs['list_exclusions'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListExclusions',
+        if "list_exclusions" not in self._stubs:
+            self._stubs["list_exclusions"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListExclusions",
                 request_serializer=logging_config.ListExclusionsRequest.serialize,
                 response_deserializer=logging_config.ListExclusionsResponse.deserialize,
             )
-        return self._stubs['list_exclusions']
+        return self._stubs["list_exclusions"]
 
     @property
-    def get_exclusion(self) -> Callable[
-            [logging_config.GetExclusionRequest],
-            logging_config.LogExclusion]:
+    def get_exclusion(
+        self,
+    ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]:
         r"""Return a callable for the get exclusion method over gRPC.
 
         Gets the description of an exclusion in the \_Default sink.
@@ -992,18 +999,18 @@ def get_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_exclusion' not in self._stubs:
-            self._stubs['get_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetExclusion',
+        if "get_exclusion" not in self._stubs:
+            self._stubs["get_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetExclusion",
                 request_serializer=logging_config.GetExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['get_exclusion']
+        return self._stubs["get_exclusion"]
 
     @property
-    def create_exclusion(self) -> Callable[
-            [logging_config.CreateExclusionRequest],
-            logging_config.LogExclusion]:
+    def create_exclusion(
+        self,
+    ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]:
         r"""Return a callable for the create exclusion method over gRPC.
 
         Creates a new exclusion in the \_Default sink in a specified
@@ -1020,18 +1027,18 @@ def create_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_exclusion' not in self._stubs:
-            self._stubs['create_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateExclusion',
+        if "create_exclusion" not in self._stubs:
+            self._stubs["create_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateExclusion",
                 request_serializer=logging_config.CreateExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['create_exclusion']
+        return self._stubs["create_exclusion"]
 
     @property
-    def update_exclusion(self) -> Callable[
-            [logging_config.UpdateExclusionRequest],
-            logging_config.LogExclusion]:
+    def update_exclusion(
+        self,
+    ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]:
         r"""Return a callable for the update exclusion method over gRPC.
 
         Changes one or more properties of an existing exclusion in the
@@ -1047,18 +1054,18 @@ def update_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_exclusion' not in self._stubs:
-            self._stubs['update_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateExclusion',
+        if "update_exclusion" not in self._stubs:
+            self._stubs["update_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateExclusion",
                 request_serializer=logging_config.UpdateExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['update_exclusion']
+        return self._stubs["update_exclusion"]
 
     @property
-    def delete_exclusion(self) -> Callable[
-            [logging_config.DeleteExclusionRequest],
-            empty_pb2.Empty]:
+    def delete_exclusion(
+        self,
+    ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]:
         r"""Return a callable for the delete exclusion method over gRPC.
 
         Deletes an exclusion in the \_Default sink.
@@ -1073,18 +1080,18 @@ def delete_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_exclusion' not in self._stubs:
-            self._stubs['delete_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteExclusion',
+        if "delete_exclusion" not in self._stubs:
+            self._stubs["delete_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteExclusion",
                 request_serializer=logging_config.DeleteExclusionRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_exclusion']
+        return self._stubs["delete_exclusion"]
 
     @property
-    def get_cmek_settings(self) -> Callable[
-            [logging_config.GetCmekSettingsRequest],
-            logging_config.CmekSettings]:
+    def get_cmek_settings(
+        self,
+    ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]:
         r"""Return a callable for the get cmek settings method over gRPC.
 
         Gets the Logging CMEK settings for the given resource.
@@ -1108,18 +1115,18 @@ def get_cmek_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_cmek_settings' not in self._stubs:
-            self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetCmekSettings',
+        if "get_cmek_settings" not in self._stubs:
+            self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetCmekSettings",
                 request_serializer=logging_config.GetCmekSettingsRequest.serialize,
                 response_deserializer=logging_config.CmekSettings.deserialize,
             )
-        return self._stubs['get_cmek_settings']
+        return self._stubs["get_cmek_settings"]
 
     @property
-    def update_cmek_settings(self) -> Callable[
-            [logging_config.UpdateCmekSettingsRequest],
-            logging_config.CmekSettings]:
+    def update_cmek_settings(
+        self,
+    ) -> Callable[[logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings]:
         r"""Return a callable for the update cmek settings method over gRPC.
 
         Updates the Log Router CMEK settings for the given resource.
@@ -1148,18 +1155,18 @@ def update_cmek_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_cmek_settings' not in self._stubs:
-            self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings',
+        if "update_cmek_settings" not in self._stubs:
+            self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings",
                 request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
                 response_deserializer=logging_config.CmekSettings.deserialize,
             )
-        return self._stubs['update_cmek_settings']
+        return self._stubs["update_cmek_settings"]
 
     @property
-    def get_settings(self) -> Callable[
-            [logging_config.GetSettingsRequest],
-            logging_config.Settings]:
+    def get_settings(
+        self,
+    ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]:
         r"""Return a callable for the get settings method over gRPC.
 
         Gets the Log Router settings for the given resource.
@@ -1184,18 +1191,18 @@ def get_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_settings' not in self._stubs:
-            self._stubs['get_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetSettings',
+        if "get_settings" not in self._stubs:
+            self._stubs["get_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetSettings",
                 request_serializer=logging_config.GetSettingsRequest.serialize,
                 response_deserializer=logging_config.Settings.deserialize,
             )
-        return self._stubs['get_settings']
+        return self._stubs["get_settings"]
 
     @property
-    def update_settings(self) -> Callable[
-            [logging_config.UpdateSettingsRequest],
-            logging_config.Settings]:
+    def update_settings(
+        self,
+    ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]:
         r"""Return a callable for the update settings method over gRPC.
 
         Updates the Log Router settings for the given resource.
@@ -1227,18 +1234,18 @@ def update_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_settings' not in self._stubs:
-            self._stubs['update_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateSettings',
+        if "update_settings" not in self._stubs:
+            self._stubs["update_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateSettings",
                 request_serializer=logging_config.UpdateSettingsRequest.serialize,
                 response_deserializer=logging_config.Settings.deserialize,
             )
-        return self._stubs['update_settings']
+        return self._stubs["update_settings"]
 
     @property
-    def copy_log_entries(self) -> Callable[
-            [logging_config.CopyLogEntriesRequest],
-            operations_pb2.Operation]:
+    def copy_log_entries(
+        self,
+    ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]:
         r"""Return a callable for the copy log entries method over gRPC.
 
         Copies a set of log entries from a log bucket to a
@@ -1254,13 +1261,13 @@ def copy_log_entries(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'copy_log_entries' not in self._stubs:
-            self._stubs['copy_log_entries'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CopyLogEntries',
+        if "copy_log_entries" not in self._stubs:
+            self._stubs["copy_log_entries"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CopyLogEntries",
                 request_serializer=logging_config.CopyLogEntriesRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['copy_log_entries']
+        return self._stubs["copy_log_entries"]
 
     def close(self):
         self._logged_channel.close()
@@ -1269,8 +1276,7 @@ def close(self):
     def cancel_operation(
         self,
    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
+        r"""Return a callable for the cancel_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -1287,8 +1293,7 @@ def cancel_operation(
     def get_operation(
         self,
     ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
+        r"""Return a callable for the get_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -1305,8 +1310,7 @@ def get_operation(
     def list_operations(
         self,
     ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
+        r"""Return a callable for the list_operations method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -1324,6 +1328,4 @@ def kind(self) -> str:
         return "grpc"
 
 
-__all__ = (
-    'ConfigServiceV2GrpcTransport',
-)
+__all__ = ("ConfigServiceV2GrpcTransport",)
diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
index fc7af0c06b..a1ab5c8294 100755
--- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
+++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
@@ -25,23 +25,24 @@
 from google.api_core import exceptions as core_exceptions
 from google.api_core import retry_async as retries
 from google.api_core import operations_v1
-from google.auth import credentials as ga_credentials # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.protobuf.json_format import MessageToJson
 import google.protobuf.message
-import grpc # type: ignore
-import proto # type: ignore
+import grpc  # type: ignore
+import proto  # type: ignore
 from grpc.experimental import aio  # type: ignore
 
 from google.cloud.logging_v2.types import logging_config
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
 from .grpc import ConfigServiceV2GrpcTransport
 
 try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -61,10 +62,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         else:
             request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
 
+        # fmt: off
         request_metadata = {
             key: value.decode("utf-8") if isinstance(value, bytes) else value
             for key, value in request_metadata
         }
+        # fmt: on
         grpc_request = {
             "payload": request_payload,
             "requestMethod": "grpc",
@@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         }
         _LOGGER.debug(
             f"Sending request for {client_call_details.method}",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.ConfigServiceV2",
                 "rpcName": str(client_call_details.method),
                 "request": grpc_request,
@@ -98,7 +101,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request
         }
         _LOGGER.debug(
             f"Received response to rpc {client_call_details.method}.",
-            extra = {
+            extra={
                 "serviceName": "google.logging.v2.ConfigServiceV2",
                 "rpcName": str(client_call_details.method),
                 "response": grpc_response,
@@ -125,13 +128,15 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport):
     _stubs: Dict[str, Callable] = {}
 
     @classmethod
-    def create_channel(cls,
-                       host: str = 'logging.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
+    def create_channel(
+        cls,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
         """Create and return a gRPC AsyncIO channel object.
         Args:
             host (Optional[str]): The host for the channel to use.
@@ -162,29 +167,31 @@ def create_channel(cls,
             default_scopes=cls.AUTH_SCOPES,
             scopes=scopes,
             default_host=cls.DEFAULT_HOST,
-            **kwargs
+            **kwargs,
         )
 
-    def __init__(self, *,
-            host: str = 'logging.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "logging.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.
 
         Args:
             host (Optional[str]):
-                The hostname to connect to (default: 'logging.googleapis.com').
+                The hostname to connect to (default: "logging.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]):
                 The authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -259,7 +266,8 @@ def __init__(self, *,
             if client_cert_source:
                 cert, key = client_cert_source()
                 self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
+                    certificate_chain=cert,
+                    private_key=key,
                 )
             else:
                 self._ssl_channel_credentials = SslCredentials().ssl_credentials
@@ -268,7 +276,8 @@ def __init__(self, *,
             if client_cert_source_for_mtls and not ssl_channel_credentials:
                 cert, key = client_cert_source_for_mtls()
                 self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                    certificate_chain=cert, private_key=key
+                    certificate_chain=cert,
+                    private_key=key,
                 )
 
         # The base transport sets the host, credentials and scopes
@@ -328,17 +337,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient:
         """
         # Quick check: Only create a new client if we do not already have one.
         if self._operations_client is None:
-            self._operations_client = operations_v1.OperationsAsyncClient(
-                self._logged_channel
-            )
+            self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel)
 
         # Return the client from cache.
         return self._operations_client
 
     @property
-    def list_buckets(self) -> Callable[
-            [logging_config.ListBucketsRequest],
-            Awaitable[logging_config.ListBucketsResponse]]:
+    def list_buckets(
+        self,
+    ) -> Callable[[logging_config.ListBucketsRequest], Awaitable[logging_config.ListBucketsResponse]]:
         r"""Return a callable for the list buckets method over gRPC.
 
         Lists log buckets.
@@ -353,18 +360,18 @@ def list_buckets(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_buckets' not in self._stubs:
-            self._stubs['list_buckets'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListBuckets',
+        if "list_buckets" not in self._stubs:
+            self._stubs["list_buckets"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListBuckets",
                 request_serializer=logging_config.ListBucketsRequest.serialize,
                 response_deserializer=logging_config.ListBucketsResponse.deserialize,
             )
-        return self._stubs['list_buckets']
+        return self._stubs["list_buckets"]
 
     @property
-    def get_bucket(self) -> Callable[
-            [logging_config.GetBucketRequest],
-            Awaitable[logging_config.LogBucket]]:
+    def get_bucket(
+        self,
+    ) -> Callable[[logging_config.GetBucketRequest], Awaitable[logging_config.LogBucket]]:
         r"""Return a callable for the get bucket method over gRPC.
 
         Gets a log bucket.
@@ -379,18 +386,18 @@ def get_bucket(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_bucket' not in self._stubs:
-            self._stubs['get_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetBucket',
+        if "get_bucket" not in self._stubs:
+            self._stubs["get_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetBucket",
                 request_serializer=logging_config.GetBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['get_bucket']
+        return self._stubs["get_bucket"]
 
     @property
-    def create_bucket_async(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            Awaitable[operations_pb2.Operation]]:
+    def create_bucket_async(
+        self,
+    ) -> Callable[[logging_config.CreateBucketRequest], Awaitable[operations_pb2.Operation]]:
         r"""Return a callable for the create bucket async method over gRPC.
 
         Creates a log bucket asynchronously that can be used
@@ -408,18 +415,18 @@ def create_bucket_async(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_bucket_async' not in self._stubs:
-            self._stubs['create_bucket_async'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateBucketAsync',
+        if "create_bucket_async" not in self._stubs:
+            self._stubs["create_bucket_async"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateBucketAsync",
                 request_serializer=logging_config.CreateBucketRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['create_bucket_async']
+        return self._stubs["create_bucket_async"]
 
     @property
-    def update_bucket_async(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            Awaitable[operations_pb2.Operation]]:
+    def update_bucket_async(
+        self,
+    ) -> Callable[[logging_config.UpdateBucketRequest], Awaitable[operations_pb2.Operation]]:
         r"""Return a callable for the update bucket async method over gRPC.
 
         Updates a log bucket asynchronously.
@@ -440,18 +447,18 @@ def update_bucket_async(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_bucket_async' not in self._stubs:
-            self._stubs['update_bucket_async'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync',
+        if "update_bucket_async" not in self._stubs:
+            self._stubs["update_bucket_async"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucketAsync",
                 request_serializer=logging_config.UpdateBucketRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['update_bucket_async']
+        return self._stubs["update_bucket_async"]
 
     @property
-    def create_bucket(self) -> Callable[
-            [logging_config.CreateBucketRequest],
-            Awaitable[logging_config.LogBucket]]:
+    def create_bucket(
+        self,
+    ) -> Callable[[logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket]]:
         r"""Return a callable for the create bucket method over gRPC.
 
         Creates a log bucket that can be used to store log
@@ -468,18 +475,18 @@ def create_bucket(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_bucket' not in self._stubs:
-            self._stubs['create_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateBucket',
+        if "create_bucket" not in self._stubs:
+            self._stubs["create_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateBucket",
                 request_serializer=logging_config.CreateBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['create_bucket']
+        return self._stubs["create_bucket"]
 
     @property
-    def update_bucket(self) -> Callable[
-            [logging_config.UpdateBucketRequest],
-            Awaitable[logging_config.LogBucket]]:
+    def update_bucket(
+        self,
+    ) -> Callable[[logging_config.UpdateBucketRequest], Awaitable[logging_config.LogBucket]]:
         r"""Return a callable for the update bucket method over gRPC.
 
         Updates a log bucket.
@@ -500,18 +507,18 @@ def update_bucket(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_bucket' not in self._stubs:
-            self._stubs['update_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateBucket',
+        if "update_bucket" not in self._stubs:
+            self._stubs["update_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateBucket",
                 request_serializer=logging_config.UpdateBucketRequest.serialize,
                 response_deserializer=logging_config.LogBucket.deserialize,
             )
-        return self._stubs['update_bucket']
+        return self._stubs["update_bucket"]
 
     @property
-    def delete_bucket(self) -> Callable[
-            [logging_config.DeleteBucketRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def delete_bucket(
+        self,
+    ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete bucket method over gRPC.
 
         Deletes a log bucket.
@@ -531,18 +538,18 @@ def delete_bucket(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_bucket' not in self._stubs:
-            self._stubs['delete_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteBucket',
+        if "delete_bucket" not in self._stubs:
+            self._stubs["delete_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteBucket",
                 request_serializer=logging_config.DeleteBucketRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_bucket']
+        return self._stubs["delete_bucket"]
 
     @property
-    def undelete_bucket(self) -> Callable[
-            [logging_config.UndeleteBucketRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def undelete_bucket(
+        self,
+    ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the undelete bucket method over gRPC.
 
         Undeletes a log bucket. A bucket that has been
@@ -559,18 +566,18 @@ def undelete_bucket(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'undelete_bucket' not in self._stubs:
-            self._stubs['undelete_bucket'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UndeleteBucket',
+        if "undelete_bucket" not in self._stubs:
+            self._stubs["undelete_bucket"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UndeleteBucket",
                 request_serializer=logging_config.UndeleteBucketRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['undelete_bucket']
+        return self._stubs["undelete_bucket"]
 
     @property
-    def list_views(self) -> Callable[
-            [logging_config.ListViewsRequest],
-            Awaitable[logging_config.ListViewsResponse]]:
+    def list_views(
+        self,
+    ) -> Callable[[logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse]]:
         r"""Return a callable for the list views method over gRPC.
 
         Lists views on a log bucket.
@@ -585,18 +592,18 @@ def list_views(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_views' not in self._stubs:
-            self._stubs['list_views'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListViews',
+        if "list_views" not in self._stubs:
+            self._stubs["list_views"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListViews",
                 request_serializer=logging_config.ListViewsRequest.serialize,
                 response_deserializer=logging_config.ListViewsResponse.deserialize,
             )
-        return self._stubs['list_views']
+        return self._stubs["list_views"]
 
     @property
-    def get_view(self) -> Callable[
-            [logging_config.GetViewRequest],
-            Awaitable[logging_config.LogView]]:
+    def get_view(
+        self,
+    ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]:
         r"""Return a callable for the get view method over gRPC.
 
         Gets a view on a log bucket..
@@ -611,18 +618,18 @@ def get_view(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_view' not in self._stubs:
-            self._stubs['get_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetView',
+        if "get_view" not in self._stubs:
+            self._stubs["get_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetView",
                 request_serializer=logging_config.GetViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['get_view']
+        return self._stubs["get_view"]
 
     @property
-    def create_view(self) -> Callable[
-            [logging_config.CreateViewRequest],
-            Awaitable[logging_config.LogView]]:
+    def create_view(
+        self,
+    ) -> Callable[[logging_config.CreateViewRequest], Awaitable[logging_config.LogView]]:
         r"""Return a callable for the create view method over gRPC.
 
         Creates a view over log entries in a log bucket. A
@@ -638,18 +645,18 @@ def create_view(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_view' not in self._stubs:
-            self._stubs['create_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateView',
+        if "create_view" not in self._stubs:
+            self._stubs["create_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateView",
                 request_serializer=logging_config.CreateViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['create_view']
+        return self._stubs["create_view"]
 
     @property
-    def update_view(self) -> Callable[
-            [logging_config.UpdateViewRequest],
-            Awaitable[logging_config.LogView]]:
+    def update_view(
+        self,
+    ) -> Callable[[logging_config.UpdateViewRequest], Awaitable[logging_config.LogView]]:
         r"""Return a callable for the update view method over gRPC.
 
         Updates a view on a log bucket. This method replaces the
@@ -668,18 +675,18 @@ def update_view(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_view' not in self._stubs:
-            self._stubs['update_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateView',
+        if "update_view" not in self._stubs:
+            self._stubs["update_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateView",
                 request_serializer=logging_config.UpdateViewRequest.serialize,
                 response_deserializer=logging_config.LogView.deserialize,
             )
-        return self._stubs['update_view']
+        return self._stubs["update_view"]
 
     @property
-    def delete_view(self) -> Callable[
-            [logging_config.DeleteViewRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def delete_view(
+        self,
+    ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete view method over gRPC.
 
         Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is
@@ -697,18 +704,18 @@ def delete_view(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_view' not in self._stubs:
-            self._stubs['delete_view'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteView',
+        if "delete_view" not in self._stubs:
+            self._stubs["delete_view"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteView",
                 request_serializer=logging_config.DeleteViewRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_view']
+        return self._stubs["delete_view"]
 
     @property
-    def list_sinks(self) -> Callable[
-            [logging_config.ListSinksRequest],
-            Awaitable[logging_config.ListSinksResponse]]:
+    def list_sinks(
+        self,
+    ) -> Callable[[logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse]]:
         r"""Return a callable for the list sinks method over gRPC.
 
         Lists sinks.
@@ -723,18 +730,18 @@ def list_sinks(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_sinks' not in self._stubs:
-            self._stubs['list_sinks'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListSinks',
+        if "list_sinks" not in self._stubs:
+            self._stubs["list_sinks"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListSinks",
                 request_serializer=logging_config.ListSinksRequest.serialize,
                 response_deserializer=logging_config.ListSinksResponse.deserialize,
             )
-        return self._stubs['list_sinks']
+        return self._stubs["list_sinks"]
 
     @property
-    def get_sink(self) -> Callable[
-            [logging_config.GetSinkRequest],
-            Awaitable[logging_config.LogSink]]:
+    def get_sink(
+        self,
+    ) -> Callable[[logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]:
         r"""Return a callable for the get sink method over gRPC.
 
         Gets a sink.
@@ -749,18 +756,18 @@ def get_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_sink' not in self._stubs:
-            self._stubs['get_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetSink',
+        if "get_sink" not in self._stubs:
+            self._stubs["get_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetSink",
                 request_serializer=logging_config.GetSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['get_sink']
+        return self._stubs["get_sink"]
 
     @property
-    def create_sink(self) -> Callable[
-            [logging_config.CreateSinkRequest],
-            Awaitable[logging_config.LogSink]]:
+    def create_sink(
+        self,
+    ) -> Callable[[logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink]]:
         r"""Return a callable for the create sink method over gRPC.
 
         Creates a sink that exports specified log entries to a
@@ -779,18 +786,18 @@ def create_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_sink' not in self._stubs:
-            self._stubs['create_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateSink',
+        if "create_sink" not in self._stubs:
+            self._stubs["create_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateSink",
                 request_serializer=logging_config.CreateSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['create_sink']
+        return self._stubs["create_sink"]
 
     @property
-    def update_sink(self) -> Callable[
-            [logging_config.UpdateSinkRequest],
-            Awaitable[logging_config.LogSink]]:
+    def update_sink(
+        self,
+    ) -> Callable[[logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink]]:
         r"""Return a callable for the update sink method over gRPC.
 
         Updates a sink. This method replaces the following fields in the
@@ -810,18 +817,18 @@ def update_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_sink' not in self._stubs:
-            self._stubs['update_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateSink',
+        if "update_sink" not in self._stubs:
+            self._stubs["update_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateSink",
                 request_serializer=logging_config.UpdateSinkRequest.serialize,
                 response_deserializer=logging_config.LogSink.deserialize,
             )
-        return self._stubs['update_sink']
+        return self._stubs["update_sink"]
 
     @property
-    def delete_sink(self) -> Callable[
-            [logging_config.DeleteSinkRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def delete_sink(
+        self,
+    ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete sink method over gRPC.
 
         Deletes a sink. If the sink has a unique ``writer_identity``,
@@ -837,18 +844,18 @@ def delete_sink(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_sink' not in self._stubs:
-            self._stubs['delete_sink'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteSink',
+        if "delete_sink" not in self._stubs:
+            self._stubs["delete_sink"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteSink",
                 request_serializer=logging_config.DeleteSinkRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_sink']
+        return self._stubs["delete_sink"]
 
     @property
-    def create_link(self) -> Callable[
-            [logging_config.CreateLinkRequest],
-            Awaitable[operations_pb2.Operation]]:
+    def create_link(
+        self,
+    ) -> Callable[[logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation]]:
         r"""Return a callable for the create link method over gRPC.
 
         Asynchronously creates a linked dataset in BigQuery
@@ -866,18 +873,18 @@ def create_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_link' not in self._stubs:
-            self._stubs['create_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateLink',
+        if "create_link" not in self._stubs:
+            self._stubs["create_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateLink",
                 request_serializer=logging_config.CreateLinkRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['create_link']
+        return self._stubs["create_link"]
 
     @property
-    def delete_link(self) -> Callable[
-            [logging_config.DeleteLinkRequest],
-            Awaitable[operations_pb2.Operation]]:
+    def delete_link(
+        self,
+    ) -> Callable[[logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation]]:
         r"""Return a callable for the delete link method over gRPC.
 
         Deletes a link. This will also delete the
@@ -893,18 +900,18 @@ def delete_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_link' not in self._stubs:
-            self._stubs['delete_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteLink',
+        if "delete_link" not in self._stubs:
+            self._stubs["delete_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteLink",
                 request_serializer=logging_config.DeleteLinkRequest.serialize,
                 response_deserializer=operations_pb2.Operation.FromString,
             )
-        return self._stubs['delete_link']
+        return self._stubs["delete_link"]
 
     @property
-    def list_links(self) -> Callable[
-            [logging_config.ListLinksRequest],
-            Awaitable[logging_config.ListLinksResponse]]:
+    def list_links(
+        self,
+    ) -> Callable[[logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse]]:
         r"""Return a callable for the list links method over gRPC.
 
         Lists links.
@@ -919,18 +926,18 @@ def list_links(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_links' not in self._stubs:
-            self._stubs['list_links'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListLinks',
+        if "list_links" not in self._stubs:
+            self._stubs["list_links"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListLinks",
                 request_serializer=logging_config.ListLinksRequest.serialize,
                 response_deserializer=logging_config.ListLinksResponse.deserialize,
             )
-        return self._stubs['list_links']
+        return self._stubs["list_links"]
 
     @property
-    def get_link(self) -> Callable[
-            [logging_config.GetLinkRequest],
-            Awaitable[logging_config.Link]]:
+    def get_link(
+        self,
+    ) -> Callable[[logging_config.GetLinkRequest], Awaitable[logging_config.Link]]:
         r"""Return a callable for the get link method over gRPC.
 
         Gets a link.
@@ -945,18 +952,18 @@ def get_link(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_link' not in self._stubs:
-            self._stubs['get_link'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetLink',
+        if "get_link" not in self._stubs:
+            self._stubs["get_link"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetLink",
                 request_serializer=logging_config.GetLinkRequest.serialize,
                 response_deserializer=logging_config.Link.deserialize,
             )
-        return self._stubs['get_link']
+        return self._stubs["get_link"]
 
     @property
-    def list_exclusions(self) -> Callable[
-            [logging_config.ListExclusionsRequest],
-            Awaitable[logging_config.ListExclusionsResponse]]:
+    def list_exclusions(
+        self,
+    ) -> Callable[[logging_config.ListExclusionsRequest], Awaitable[logging_config.ListExclusionsResponse]]:
         r"""Return a callable for the list exclusions method over gRPC.
 
         Lists all the exclusions on the \_Default sink in a parent
@@ -972,18 +979,18 @@ def list_exclusions(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'list_exclusions' not in self._stubs:
-            self._stubs['list_exclusions'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/ListExclusions',
+        if "list_exclusions" not in self._stubs:
+            self._stubs["list_exclusions"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/ListExclusions",
                 request_serializer=logging_config.ListExclusionsRequest.serialize,
                 response_deserializer=logging_config.ListExclusionsResponse.deserialize,
             )
-        return self._stubs['list_exclusions']
+        return self._stubs["list_exclusions"]
 
     @property
-    def get_exclusion(self) -> Callable[
-            [logging_config.GetExclusionRequest],
-            Awaitable[logging_config.LogExclusion]]:
+    def get_exclusion(
+        self,
+    ) -> Callable[[logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion]]:
         r"""Return a callable for the get exclusion method over gRPC.
 
         Gets the description of an exclusion in the \_Default sink.
@@ -998,18 +1005,18 @@ def get_exclusion(self) -> Callable[
         # the request.
        # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_exclusion' not in self._stubs:
-            self._stubs['get_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetExclusion',
+        if "get_exclusion" not in self._stubs:
+            self._stubs["get_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetExclusion",
                 request_serializer=logging_config.GetExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['get_exclusion']
+        return self._stubs["get_exclusion"]
 
     @property
-    def create_exclusion(self) -> Callable[
-            [logging_config.CreateExclusionRequest],
-            Awaitable[logging_config.LogExclusion]]:
+    def create_exclusion(
+        self,
+    ) -> Callable[[logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion]]:
         r"""Return a callable for the create exclusion method over gRPC.
 
         Creates a new exclusion in the \_Default sink in a specified
@@ -1026,18 +1033,18 @@ def create_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'create_exclusion' not in self._stubs:
-            self._stubs['create_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/CreateExclusion',
+        if "create_exclusion" not in self._stubs:
+            self._stubs["create_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/CreateExclusion",
                 request_serializer=logging_config.CreateExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['create_exclusion']
+        return self._stubs["create_exclusion"]
 
     @property
-    def update_exclusion(self) -> Callable[
-            [logging_config.UpdateExclusionRequest],
-            Awaitable[logging_config.LogExclusion]]:
+    def update_exclusion(
+        self,
+    ) -> Callable[[logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion]]:
         r"""Return a callable for the update exclusion method over gRPC.
 
         Changes one or more properties of an existing exclusion in the
@@ -1053,18 +1060,18 @@ def update_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_exclusion' not in self._stubs:
-            self._stubs['update_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateExclusion',
+        if "update_exclusion" not in self._stubs:
+            self._stubs["update_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateExclusion",
                 request_serializer=logging_config.UpdateExclusionRequest.serialize,
                 response_deserializer=logging_config.LogExclusion.deserialize,
             )
-        return self._stubs['update_exclusion']
+        return self._stubs["update_exclusion"]
 
     @property
-    def delete_exclusion(self) -> Callable[
-            [logging_config.DeleteExclusionRequest],
-            Awaitable[empty_pb2.Empty]]:
+    def delete_exclusion(
+        self,
+    ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete exclusion method over gRPC.
 
         Deletes an exclusion in the \_Default sink.
@@ -1079,18 +1086,18 @@ def delete_exclusion(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'delete_exclusion' not in self._stubs:
-            self._stubs['delete_exclusion'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/DeleteExclusion',
+        if "delete_exclusion" not in self._stubs:
+            self._stubs["delete_exclusion"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/DeleteExclusion",
                 request_serializer=logging_config.DeleteExclusionRequest.serialize,
                 response_deserializer=empty_pb2.Empty.FromString,
             )
-        return self._stubs['delete_exclusion']
+        return self._stubs["delete_exclusion"]
 
     @property
-    def get_cmek_settings(self) -> Callable[
-            [logging_config.GetCmekSettingsRequest],
-            Awaitable[logging_config.CmekSettings]]:
+    def get_cmek_settings(
+        self,
+    ) -> Callable[[logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]:
         r"""Return a callable for the get cmek settings method over gRPC.
 
         Gets the Logging CMEK settings for the given resource.
@@ -1114,18 +1121,18 @@ def get_cmek_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_cmek_settings' not in self._stubs:
-            self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetCmekSettings',
+        if "get_cmek_settings" not in self._stubs:
+            self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetCmekSettings",
                 request_serializer=logging_config.GetCmekSettingsRequest.serialize,
                 response_deserializer=logging_config.CmekSettings.deserialize,
             )
-        return self._stubs['get_cmek_settings']
+        return self._stubs["get_cmek_settings"]
 
     @property
-    def update_cmek_settings(self) -> Callable[
-            [logging_config.UpdateCmekSettingsRequest],
-            Awaitable[logging_config.CmekSettings]]:
+    def update_cmek_settings(
+        self,
+    ) -> Callable[[logging_config.UpdateCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]:
         r"""Return a callable for the update cmek settings method over gRPC.
 
         Updates the Log Router CMEK settings for the given resource.
@@ -1154,18 +1161,18 @@ def update_cmek_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'update_cmek_settings' not in self._stubs:
-            self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings',
+        if "update_cmek_settings" not in self._stubs:
+            self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/UpdateCmekSettings",
                 request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
                 response_deserializer=logging_config.CmekSettings.deserialize,
             )
-        return self._stubs['update_cmek_settings']
+        return self._stubs["update_cmek_settings"]
 
     @property
-    def get_settings(self) -> Callable[
-            [logging_config.GetSettingsRequest],
-            Awaitable[logging_config.Settings]]:
+    def get_settings(
+        self,
+    ) -> Callable[[logging_config.GetSettingsRequest], Awaitable[logging_config.Settings]]:
         r"""Return a callable for the get settings method over gRPC.
 
         Gets the Log Router settings for the given resource.
@@ -1190,18 +1197,18 @@ def get_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
-        if 'get_settings' not in self._stubs:
-            self._stubs['get_settings'] = self._logged_channel.unary_unary(
-                '/google.logging.v2.ConfigServiceV2/GetSettings',
+        if "get_settings" not in self._stubs:
+            self._stubs["get_settings"] = self._logged_channel.unary_unary(
+                "/google.logging.v2.ConfigServiceV2/GetSettings",
                 request_serializer=logging_config.GetSettingsRequest.serialize,
                 response_deserializer=logging_config.Settings.deserialize,
             )
-        return self._stubs['get_settings']
+        return self._stubs["get_settings"]
 
     @property
-    def update_settings(self) -> Callable[
-            [logging_config.UpdateSettingsRequest],
-            Awaitable[logging_config.Settings]]:
+    def update_settings(
+        self,
+    ) -> Callable[[logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings]]:
         r"""Return a callable for the update settings method over gRPC.
 
         Updates the Log Router settings for the given resource.
@@ -1233,18 +1240,18 @@ def update_settings(self) -> Callable[
         # the request.
         # gRPC handles serialization and deserialization, so we just need
         # to pass in the functions for each.
- if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/UpdateSettings', + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['update_settings'] + return self._stubs["update_settings"] @property - def copy_log_entries(self) -> Callable[ - [logging_config.CopyLogEntriesRequest], - Awaitable[operations_pb2.Operation]]: + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the copy log entries method over gRPC. Copies a set of log entries from a log bucket to a @@ -1260,16 +1267,16 @@ def copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['copy_log_entries'] + return self._stubs["copy_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_buckets: self._wrap_method( self.list_buckets, @@ -1541,8 +1548,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1559,8 +1565,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -1577,8 +1582,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -1592,6 +1596,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'ConfigServiceV2GrpcAsyncIOTransport', -) +__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py index d9820f0906..41c0dc4fab 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import LoggingServiceV2AsyncClient __all__ = ( - 'LoggingServiceV2Client', - 'LoggingServiceV2AsyncClient', + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f84e9f6ec5..f821db9325 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -16,7 +16,21 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +38,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,19 +52,21 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -173,12 +189,14 @@ def universe_domain(self) -> str: get_transport_class = LoggingServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = 
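The collapsed __all__ relies on the trailing comma: ("Name",) is a one-element tuple, while ("Name") is just a parenthesized string. A two-line check of the distinction:

__all__ = ("ConfigServiceV2GrpcAsyncIOTransport",)
assert isinstance(__all__, tuple) and len(__all__) == 1
assert isinstance(("ConfigServiceV2GrpcAsyncIOTransport"), str)  # no comma: not a tuple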
"grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 async client. Args: @@ -233,31 +251,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - async def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -322,8 +342,7 @@ async def sample_delete_log(): flattened_params = [log_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -341,11 +360,13 @@ async def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("log_name", request.log_name), )), ) + # fmt: on # Validate the universe domain. 
self._client._validate_universe_domain() @@ -358,17 +379,18 @@ async def sample_delete_log(): metadata=metadata, ) - async def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + async def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -513,8 +535,7 @@ async def sample_write_log_entries(): flattened_params = [log_name, resource, labels, entries] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -551,16 +572,17 @@ async def sample_write_log_entries(): # Done; return the response. return response - async def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesAsyncPager: + async def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. 
For ways to export log entries, see `Exporting @@ -665,8 +687,7 @@ async def sample_list_log_entries(): flattened_params = [resource_names, filter, order_by] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -711,13 +732,14 @@ async def sample_list_log_entries(): # Done; return the response. return response - async def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + async def list_monitored_resource_descriptors( + self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -803,14 +825,15 @@ async def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - async def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsAsyncPager: + async def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -879,8 +902,7 @@ async def sample_list_logs(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -898,11 +920,13 @@ async def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -929,13 +953,14 @@ async def sample_list_logs(): # Done; return the response. 
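Two small patterns recur in every RPC method shown here. First, the mutual-exclusion guard: callers may pass either a full request object or individual flattened fields, never both. Second, routing fields are folded into a single x-goog-request-params metadata entry; that is what gapic_v1.routing_header.to_grpc_metadata produces, and the helper below is only a simplified stand-in for it:

from urllib.parse import quote

def check_flattened(request, *flattened):
    # Mirrors the generated guard in delete_log / list_logs above.
    if request is not None and any(p is not None for p in flattened):
        raise ValueError(
            "If the `request` argument is set, then none of the individual "
            "field arguments should be set."
        )

def routing_metadata(**params):
    # Simplified stand-in for gapic_v1.routing_header.to_grpc_metadata.
    value = "&".join(f"{k}={quote(str(v), safe='')}" for k, v in params.items())
    return ("x-goog-request-params", value)

check_flattened(None, "projects/p/logs/syslog")  # ok: request is None
print(routing_metadata(parent="projects/my-project"))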
return response - def tail_log_entries(self, - requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -961,7 +986,7 @@ async def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] @@ -1048,17 +1073,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1100,17 +1126,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1155,16 +1182,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self @@ -1172,12 +1201,11 @@ async def __aenter__(self) -> "LoggingServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2AsyncClient", -) +__all__ = ("LoggingServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index 8c37951cb5..fcad2a5e13 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -19,7 +19,21 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +42,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +56,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,7 +67,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -65,13 +80,15 @@ class LoggingServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. 
""" + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -107,9 +124,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -118,16 +133,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -141,21 +155,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -190,7 +202,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): LoggingServiceV2Client: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -207,73 +220,116 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str,log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log, ) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod - def parse_log_path(path: str) -> Dict[str,str]: + def parse_log_path( + path: str, + ) -> Dict[str, str]: """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return 
"projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -305,8 +361,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = LoggingServiceV2Client._use_client_cert_effective() @@ -436,7 +494,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -476,12 +534,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the logging service v2 client. Args: @@ -538,12 +598,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables() self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. 
self._is_universe_domain_valid = False @@ -563,22 +623,22 @@ def __init__(self, *, if transport_provided: # transport is a LoggingServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(LoggingServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - LoggingServiceV2Client._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or LoggingServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -608,25 +668,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.LoggingServiceV2", "credentialsType": None, - } + }, ) - def delete_log(self, - request: Optional[Union[logging.DeleteLogRequest, dict]] = None, - *, - log_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def delete_log( + self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log entries written shortly before the delete operation might not be @@ -691,8 +754,7 @@ def sample_delete_log(): flattened_params = [log_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -709,11 +771,13 @@ def sample_delete_log(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("log_name", request.log_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -726,17 +790,18 @@ def sample_delete_log(): metadata=metadata, ) - def write_log_entries(self, - request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, - *, - log_name: Optional[str] = None, - resource: Optional[monitored_resource_pb2.MonitoredResource] = None, - labels: Optional[MutableMapping[str, str]] = None, - entries: Optional[MutableSequence[log_entry.LogEntry]] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging.WriteLogEntriesResponse: + def write_log_entries( + self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is used, directly or indirectly, by the Logging agent @@ -881,8 +946,7 @@ def sample_write_log_entries(): flattened_params = [log_name, resource, labels, entries] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -917,16 +981,17 @@ def sample_write_log_entries(): # Done; return the response. return response - def list_log_entries(self, - request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, - *, - resource_names: Optional[MutableSequence[str]] = None, - filter: Optional[str] = None, - order_by: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogEntriesPager: + def list_log_entries( + self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. 
For ways to export log entries, see `Exporting @@ -1031,8 +1096,7 @@ def sample_list_log_entries(): flattened_params = [resource_names, filter, order_by] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1076,13 +1140,14 @@ def sample_list_log_entries(): # Done; return the response. return response - def list_monitored_resource_descriptors(self, - request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListMonitoredResourceDescriptorsPager: + def list_monitored_resource_descriptors( + self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1168,14 +1233,15 @@ def sample_list_monitored_resource_descriptors(): # Done; return the response. return response - def list_logs(self, - request: Optional[Union[logging.ListLogsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogsPager: + def list_logs( + self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. Only logs that have entries are listed. @@ -1244,8 +1310,7 @@ def sample_list_logs(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1262,11 +1327,13 @@ def sample_list_logs(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1293,13 +1360,14 @@ def sample_list_logs(): # Done; return the response. 
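For context on how these pager-returning methods are consumed: iterating the returned pager fetches further pages on demand. A hypothetical usage with a placeholder project; running it requires real credentials:

from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client

client = LoggingServiceV2Client()
# ListLogsPager yields log names across pages transparently.
for log_name in client.list_logs(parent="projects/my-project"):
    print(log_name)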
return response - def tail_log_entries(self, - requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> Iterable[logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading logs. @@ -1325,7 +1393,7 @@ def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] @@ -1425,10 +1493,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1436,7 +1501,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1481,10 +1550,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1492,7 +1558,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1540,21 +1610,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
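tail_log_entries is a bidirectional stream: it takes an iterator of requests and yields responses as entries arrive. A hypothetical sketch with a placeholder resource name, reusing the client from the previous snippet:

from google.cloud.logging_v2.types import logging

def request_stream():
    # The method expects an iterator of TailLogEntriesRequest messages.
    yield logging.TailLogEntriesRequest(resource_names=["projects/my-project"])

for response in client.tail_log_entries(requests=request_stream()):
    for entry in response.entries:
        print(entry.log_name)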
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -1562,6 +1629,4 @@ def cancel_operation( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "LoggingServiceV2Client", -) +__all__ = ("LoggingServiceV2Client",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py index ee183ce805..601b6ad237 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -46,14 +47,17 @@ class ListLogEntriesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogEntriesResponse], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -94,7 +98,7 @@ def __iter__(self) -> Iterator[log_entry.LogEntry]: yield from page.entries def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogEntriesAsyncPager: @@ -114,14 +118,17 @@ class ListLogEntriesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], - request: logging.ListLogEntriesRequest, - response: logging.ListLogEntriesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -156,6 +163,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: @@ -165,7 +173,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsPager: @@ -185,14 +193,17 @@ class ListMonitoredResourceDescriptorsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -233,7 +244,7 @@ def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescripto yield from page.resource_descriptors def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListMonitoredResourceDescriptorsAsyncPager: @@ -253,14 +264,17 @@ class ListMonitoredResourceDescriptorsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], - request: logging.ListMonitoredResourceDescriptorsRequest, - response: logging.ListMonitoredResourceDescriptorsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -295,6 +309,7 @@ async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsR self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: @@ -304,7 +319,7 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsPager: @@ -324,14 +339,17 @@ class ListLogsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging.ListLogsResponse], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -372,7 +390,7 @@ def __iter__(self) -> Iterator[str]: yield from page.log_names def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogsAsyncPager: @@ -392,14 +410,17 @@ class ListLogsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging.ListLogsResponse]], - request: logging.ListLogsRequest, - response: logging.ListLogsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -434,6 +455,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: @@ -443,4 +465,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 25058513ec..48f0b711cd 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] -_transport_registry['grpc'] = LoggingServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = LoggingServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport __all__ = ( - 'LoggingServiceV2Transport', - 'LoggingServiceV2GrpcTransport', - 'LoggingServiceV2GrpcAsyncIOTransport', + "LoggingServiceV2Transport", + "LoggingServiceV2GrpcTransport", + "LoggingServiceV2GrpcAsyncIOTransport", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index f9d40b5621..d6471e4ac4 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -24,11 +24,11 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -40,33 +40,36 @@ class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) + # fmt: on - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -102,10 +105,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -113,15 +116,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -242,70 +249,100 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() + # fmt: off @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log( + self, + ) -> Callable[ + [logging.DeleteLogRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Union[ - logging.WriteLogEntriesResponse, - Awaitable[logging.WriteLogEntriesResponse] - ]]: + def write_log_entries( + self, + ) -> Callable[ + [logging.WriteLogEntriesRequest], + Union[ + logging.WriteLogEntriesResponse, + Awaitable[logging.WriteLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Union[ - logging.ListLogEntriesResponse, - Awaitable[logging.ListLogEntriesResponse] - ]]: + def list_log_entries( + self, + ) -> Callable[ + [logging.ListLogEntriesRequest], + Union[ + logging.ListLogEntriesResponse, + Awaitable[logging.ListLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Union[ - logging.ListMonitoredResourceDescriptorsResponse, - Awaitable[logging.ListMonitoredResourceDescriptorsResponse] - ]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Union[ + logging.ListMonitoredResourceDescriptorsResponse, + Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Union[ - logging.ListLogsResponse, - Awaitable[logging.ListLogsResponse] - ]]: + def list_logs( + self, + ) -> Callable[ + [logging.ListLogsRequest], + Union[ + logging.ListLogsResponse, + Awaitable[logging.ListLogsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Union[ - logging.TailLogEntriesResponse, - Awaitable[logging.TailLogEntriesResponse] - ]]: + def tail_log_entries( + self, + ) -> Callable[ + [logging.TailLogEntriesRequest], + Union[ + logging.TailLogEntriesResponse, + Awaitable[logging.TailLogEntriesResponse] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -319,19 +356,13 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property @@ -339,6 +370,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'LoggingServiceV2Transport', -) +__all__ = ("LoggingServiceV2Transport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py 
index 8edc617027..7d9e6f3c67 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -21,7 +21,7 @@ from google.api_core import grpc_helpers from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -31,12 +31,13 @@ import proto # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -56,10 +57,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -67,7 +70,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -93,7 +96,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,28 +118,31 @@ class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -210,7 +216,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -219,7 +226,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -254,19 +262,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -302,19 +312,18 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -333,18 +342,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], logging.WriteLogEntriesResponse]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -365,18 +374,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], logging.ListLogEntriesResponse]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. 
Use this method to retrieve log entries that @@ -394,18 +403,18 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[[logging.ListMonitoredResourceDescriptorsRequest], logging.ListMonitoredResourceDescriptorsResponse]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -422,18 +431,18 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], logging.ListLogsResponse]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -450,18 +459,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], logging.TailLogEntriesResponse]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -478,13 +487,13 @@ def tail_log_entries(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def close(self): self._logged_channel.close() @@ -493,8 +502,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -511,8 +519,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -529,8 +536,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -548,6 +554,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'LoggingServiceV2GrpcTransport', -) +__all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 92aa1d5256..116639c3bc 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -24,23 +24,24 @@ from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -60,10 +61,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -97,7 +100,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.LoggingServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +127,15 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, 
+ scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -161,29 +166,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -257,7 +264,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -266,7 +274,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -318,9 +327,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log( + self, + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log for the \_Default Log @@ -339,18 +348,18 @@ def delete_log(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/DeleteLog', + if "delete_log" not in self._stubs: + self._stubs["delete_log"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log'] + return self._stubs["delete_log"] @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - Awaitable[logging.WriteLogEntriesResponse]]: + def write_log_entries( + self, + ) -> Callable[[logging.WriteLogEntriesRequest], Awaitable[logging.WriteLogEntriesResponse]]: r"""Return a callable for the write log entries method over gRPC. Writes log entries to Logging. This API method is the @@ -371,18 +380,18 @@ def write_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + if "write_log_entries" not in self._stubs: + self._stubs["write_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/WriteLogEntries", request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, ) - return self._stubs['write_log_entries'] + return self._stubs["write_log_entries"] @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - Awaitable[logging.ListLogEntriesResponse]]: + def list_log_entries( + self, + ) -> Callable[[logging.ListLogEntriesRequest], Awaitable[logging.ListLogEntriesResponse]]: r"""Return a callable for the list log entries method over gRPC. Lists log entries. Use this method to retrieve log entries that @@ -400,18 +409,18 @@ def list_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogEntries', + if "list_log_entries" not in self._stubs: + self._stubs["list_log_entries"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogEntries", request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, ) - return self._stubs['list_log_entries'] + return self._stubs["list_log_entries"] @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + def list_monitored_resource_descriptors( + self, + ) -> Callable[[logging.ListMonitoredResourceDescriptorsRequest], Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: r"""Return a callable for the list monitored resource descriptors method over gRPC. @@ -428,18 +437,18 @@ def list_monitored_resource_descriptors(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + if "list_monitored_resource_descriptors" not in self._stubs: + self._stubs["list_monitored_resource_descriptors"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, ) - return self._stubs['list_monitored_resource_descriptors'] + return self._stubs["list_monitored_resource_descriptors"] @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - Awaitable[logging.ListLogsResponse]]: + def list_logs( + self, + ) -> Callable[[logging.ListLogsRequest], Awaitable[logging.ListLogsResponse]]: r"""Return a callable for the list logs method over gRPC. Lists the logs in projects, organizations, folders, @@ -456,18 +465,18 @@ def list_logs(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self._logged_channel.unary_unary( - '/google.logging.v2.LoggingServiceV2/ListLogs', + if "list_logs" not in self._stubs: + self._stubs["list_logs"] = self._logged_channel.unary_unary( + "/google.logging.v2.LoggingServiceV2/ListLogs", request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, ) - return self._stubs['list_logs'] + return self._stubs["list_logs"] @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - Awaitable[logging.TailLogEntriesResponse]]: + def tail_log_entries( + self, + ) -> Callable[[logging.TailLogEntriesRequest], Awaitable[logging.TailLogEntriesResponse]]: r"""Return a callable for the tail log entries method over gRPC. Streaming read of log entries as they are ingested. @@ -484,16 +493,16 @@ def tail_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( - '/google.logging.v2.LoggingServiceV2/TailLogEntries', + if "tail_log_entries" not in self._stubs: + self._stubs["tail_log_entries"] = self._logged_channel.stream_stream( + "/google.logging.v2.LoggingServiceV2/TailLogEntries", request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, ) - return self._stubs['tail_log_entries'] + return self._stubs["tail_log_entries"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.delete_log: self._wrap_method( self.delete_log, @@ -624,8 +633,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -642,8 +650,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -660,8 +667,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -675,6 +681,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'LoggingServiceV2GrpcAsyncIOTransport', -) +__all__ = ("LoggingServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 27ad0c0622..2d76e8d6cc 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -17,6 +17,6 @@ from .async_client import BaseMetricsServiceV2AsyncClient __all__ = ( - 'BaseMetricsServiceV2Client', - 'BaseMetricsServiceV2AsyncClient', + "BaseMetricsServiceV2Client", + "BaseMetricsServiceV2AsyncClient", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 5f0bf0c782..d225a7ecca 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.logging_v2 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -38,7 +49,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from 
google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -46,12 +57,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class BaseMetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -174,12 +187,14 @@ def universe_domain(self) -> str: get_transport_class = BaseMetricsServiceV2Client.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base metrics service v2 async client. Args: @@ -234,31 +249,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2AsyncClient`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - async def _list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsAsyncPager: + async def _list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. .. 
code-block:: python @@ -325,8 +342,7 @@ async def sample_list_log_metrics(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -344,11 +360,13 @@ async def sample_list_log_metrics(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -375,14 +393,15 @@ async def sample_list_log_metrics(): # Done; return the response. return response - async def _get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. code-block:: python @@ -454,8 +473,7 @@ async def sample_get_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -473,11 +491,13 @@ async def sample_get_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -493,15 +513,16 @@ async def sample_get_log_metric(): # Done; return the response. 
return response - async def _create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -589,8 +610,7 @@ async def sample_create_log_metric(): flattened_params = [parent, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -610,11 +630,13 @@ async def sample_create_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -630,15 +652,16 @@ async def sample_create_log_metric(): # Done; return the response. return response - async def _update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + async def _update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -725,8 +748,7 @@ async def sample_update_log_metric(): flattened_params = [metric_name, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
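The client hunks above reflow, without changing behavior, the generated pattern for attaching routing information to request metadata. A minimal standalone sketch of that pattern, using only the public ``google-api-core`` helper that the generated code itself calls (the function name and its inputs are illustrative, not part of this patch):

.. code-block:: python

    from google.api_core import gapic_v1

    def routed_metadata(parent: str, metadata=()):
        # to_grpc_metadata(...) packs the given key/value pairs into the single
        # "x-goog-request-params" header that the backend uses to route the
        # request; the generated clients append it to any caller-supplied
        # metadata exactly like this.
        return tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", parent),)),
        )

    # routed_metadata("projects/my-project") returns a one-element tuple whose
    # header value encodes parent=projects/my-project.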
@@ -746,11 +768,13 @@ async def sample_update_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -766,14 +790,15 @@ async def sample_update_log_metric(): # Done; return the response. return response - async def _delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + async def _delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. code-block:: python @@ -826,8 +851,7 @@ async def sample_delete_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -845,11 +869,13 @@ async def sample_delete_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -899,17 +925,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -951,17 +978,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1006,16 +1034,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def __aenter__(self) -> "BaseMetricsServiceV2AsyncClient": return self @@ -1023,12 +1053,11 @@ async def __aenter__(self) -> "BaseMetricsServiceV2AsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseMetricsServiceV2AsyncClient", -) +__all__ = ("BaseMetricsServiceV2AsyncClient",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index 479ae7c032..7a368b217d 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -52,7 +65,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # 
type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport @@ -66,13 +79,15 @@ class BaseMetricsServiceV2ClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -108,9 +123,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -119,16 +132,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -142,21 +154,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -191,7 +201,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): BaseMetricsServiceV2Client: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -208,73 +219,116 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str,metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" - return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + return "projects/{project}/metrics/{metric}".format( + project=project, + metric=metric, + ) @staticmethod - def parse_log_metric_path(path: str) -> Dict[str,str]: + def parse_log_metric_path( + path: str, + ) -> Dict[str, str]: """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location 
string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -306,8 +360,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = BaseMetricsServiceV2Client._use_client_cert_effective() @@ -437,7 +493,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -477,12 +533,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the base metrics service v2 client. Args: @@ -539,12 +597,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseMetricsServiceV2Client._read_environment_variables() self._client_cert_source = BaseMetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = BaseMetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. 
self._is_universe_domain_valid = False @@ -564,22 +622,22 @@ def __init__(self, *, if transport_provided: # transport is a MetricsServiceV2Transport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(MetricsServiceV2Transport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - BaseMetricsServiceV2Client._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or BaseMetricsServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: import google.auth._default # type: ignore @@ -609,25 +667,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2Client`.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.logging.v2.MetricsServiceV2", "credentialsType": None, - } + }, ) - def _list_log_metrics(self, - request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListLogMetricsPager: + def _list_log_metrics( + self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. .. code-block:: python @@ -694,8 +755,7 @@ def sample_list_log_metrics(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -712,11 +772,13 @@ def sample_list_log_metrics(): # Certain fields should be provided within the metadata header; # add these here. 
+ # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -743,14 +805,15 @@ def sample_list_log_metrics(): # Done; return the response. return response - def _get_log_metric(self, - request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _get_log_metric( + self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. .. code-block:: python @@ -822,8 +885,7 @@ def sample_get_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -840,11 +902,13 @@ def sample_get_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -860,15 +924,16 @@ def sample_get_log_metric(): # Done; return the response. return response - def _create_log_metric(self, - request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, - *, - parent: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _create_log_metric( + self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. .. code-block:: python @@ -956,8 +1021,7 @@ def sample_create_log_metric(): flattened_params = [parent, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -976,11 +1040,13 @@ def sample_create_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -996,15 +1062,16 @@ def sample_create_log_metric(): # Done; return the response. return response - def _update_log_metric(self, - request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - metric: Optional[logging_metrics.LogMetric] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> logging_metrics.LogMetric: + def _update_log_metric( + self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. .. code-block:: python @@ -1091,8 +1158,7 @@ def sample_update_log_metric(): flattened_params = [metric_name, metric] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1111,11 +1177,13 @@ def sample_update_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1131,14 +1199,15 @@ def sample_update_log_metric(): # Done; return the response. return response - def _delete_log_metric(self, - request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, - *, - metric_name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> None: + def _delete_log_metric( + self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Deletes a logs-based metric. .. 
code-block:: python @@ -1191,8 +1260,7 @@ def sample_delete_log_metric(): flattened_params = [metric_name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1209,11 +1277,13 @@ def sample_delete_log_metric(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("metric_name", request.metric_name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1276,10 +1346,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1287,7 +1354,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1332,10 +1403,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1343,7 +1411,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1391,21 +1463,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. 
- rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - - - - + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -1413,6 +1482,4 @@ def cancel_operation( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "BaseMetricsServiceV2Client", -) +__all__ = ("BaseMetricsServiceV2Client",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 3d44cf6e4c..71c24ff76e 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -44,14 +45,17 @@ class ListLogMetricsPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., logging_metrics.ListLogMetricsResponse], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., logging_metrics.ListLogMetricsResponse], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -92,7 +96,7 @@ def __iter__(self) -> Iterator[logging_metrics.LogMetric]: yield from page.metrics def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListLogMetricsAsyncPager: @@ -112,14 +116,17 @@ class ListLogMetricsAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" - def __init__(self, - method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], - request: logging_metrics.ListLogMetricsRequest, - response: logging_metrics.ListLogMetricsResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]], + request: logging_metrics.ListLogMetricsRequest, + response: logging_metrics.ListLogMetricsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. Args: @@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: @@ -163,4 +171,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index a6eb39e80f..4975feb994 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -23,11 +23,11 @@ # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] -_transport_registry['grpc'] = MetricsServiceV2GrpcTransport -_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport +_transport_registry["grpc"] = MetricsServiceV2GrpcTransport +_transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport __all__ = ( - 'MetricsServiceV2Transport', - 'MetricsServiceV2GrpcTransport', - 'MetricsServiceV2GrpcAsyncIOTransport', + "MetricsServiceV2Transport", + "MetricsServiceV2GrpcTransport", + "MetricsServiceV2GrpcAsyncIOTransport", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index fef46ac38e..b8b167f710 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -24,11 +24,11 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -40,33 +40,36 @@ class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", ) + # fmt: on - DEFAULT_HOST: str = 'logging.googleapis.com' + DEFAULT_HOST: str = "logging.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). 
+ The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -102,10 +105,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -113,15 +116,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -215,61 +222,86 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() + # fmt: off @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Union[ - logging_metrics.ListLogMetricsResponse, - Awaitable[logging_metrics.ListLogMetricsResponse] - ]]: + def list_log_metrics( + self, + ) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Union[ + logging_metrics.ListLogMetricsResponse, + Awaitable[logging_metrics.ListLogMetricsResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def get_log_metric( + self, + ) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def create_log_metric( + self, + ) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Union[ - logging_metrics.LogMetric, - Awaitable[logging_metrics.LogMetric] - ]]: + def update_log_metric( + self, + ) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Union[ + logging_metrics.LogMetric, + Awaitable[logging_metrics.LogMetric] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: + def delete_log_metric( + self, + ) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -283,19 +315,13 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property @@ -303,6 +329,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'MetricsServiceV2Transport', -) +__all__ = ("MetricsServiceV2Transport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d2c41d01e8..fb2a98c878 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -21,7 +21,7 @@ from google.api_core import grpc_helpers from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore 
from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -31,12 +31,13 @@ import proto # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -56,10 +57,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -67,7 +70,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -93,7 +96,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": client_call_details.method, "response": grpc_response, @@ -115,28 +118,31 @@ class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -210,7 +216,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -219,7 +226,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -254,19 +262,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -302,19 +312,18 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: + def list_log_metrics( + self, + ) -> Callable[[logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -329,18 +338,18 @@ def list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: + def get_log_metric( + self, + ) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -355,18 +364,18 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: + def create_log_metric( + self, + ) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. @@ -381,18 +390,18 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: + def update_log_metric( + self, + ) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -407,18 +416,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -433,13 +442,13 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def close(self): self._logged_channel.close() @@ -448,8 +457,7 @@ def close(self): def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -466,8 +474,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -484,8 +491,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -503,6 +509,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'MetricsServiceV2GrpcTransport', -) +__all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 15b1ab3ad8..66c283fc46 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -24,23 +24,24 @@ from google.api_core import grpc_helpers_async from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -60,10 +61,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -71,7 +74,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -97,7 +100,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.logging.v2.MetricsServiceV2", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -124,13 +127,15 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] 
= None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -161,29 +166,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "logging.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'logging.googleapis.com'). + The hostname to connect to (default: "logging.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -257,7 +264,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -266,7 +274,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -318,9 +327,9 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - Awaitable[logging_metrics.ListLogMetricsResponse]]: + def list_log_metrics( + self, + ) -> Callable[[logging_metrics.ListLogMetricsRequest], Awaitable[logging_metrics.ListLogMetricsResponse]]: r"""Return a callable for the list log metrics method over gRPC. Lists logs-based metrics. @@ -335,18 +344,18 @@ def list_log_metrics(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + if "list_log_metrics" not in self._stubs: + self._stubs["list_log_metrics"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/ListLogMetrics", request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['list_log_metrics'] + return self._stubs["list_log_metrics"] @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def get_log_metric( + self, + ) -> Callable[[logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the get log metric method over gRPC. Gets a logs-based metric. @@ -361,18 +370,18 @@ def get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/GetLogMetric', + if "get_log_metric" not in self._stubs: + self._stubs["get_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/GetLogMetric", request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['get_log_metric'] + return self._stubs["get_log_metric"] @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def create_log_metric( + self, + ) -> Callable[[logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the create log metric method over gRPC. Creates a logs-based metric. @@ -387,18 +396,18 @@ def create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + if "create_log_metric" not in self._stubs: + self._stubs["create_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/CreateLogMetric", request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['create_log_metric'] + return self._stubs["create_log_metric"] @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - Awaitable[logging_metrics.LogMetric]]: + def update_log_metric( + self, + ) -> Callable[[logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the update log metric method over gRPC. Creates or updates a logs-based metric. @@ -413,18 +422,18 @@ def update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + if "update_log_metric" not in self._stubs: + self._stubs["update_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['update_log_metric'] + return self._stubs["update_log_metric"] @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - Awaitable[empty_pb2.Empty]]: + def delete_log_metric( + self, + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -439,16 +448,16 @@ def delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + if "delete_log_metric" not in self._stubs: + self._stubs["delete_log_metric"] = self._logged_channel.unary_unary( + "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['delete_log_metric'] + return self._stubs["delete_log_metric"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_log_metrics: self._wrap_method( self.list_log_metrics, @@ -552,8 +561,7 @@ def kind(self) -> str: def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -570,8 +578,7 @@ def cancel_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -588,8 +595,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -603,6 +609,4 @@ def list_operations( return self._stubs["list_operations"] -__all__ = ( - 'MetricsServiceV2GrpcAsyncIOTransport', -) +__all__ = ("MetricsServiceV2GrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py index 3023b14aa8..efea793076 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py @@ -99,80 +99,80 @@ ) __all__ = ( - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', - 'DeleteLogRequest', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'BigQueryDataset', - 'BigQueryOptions', - 'BucketMetadata', - 'CmekSettings', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesRequest', - 'CopyLogEntriesResponse', - 'CreateBucketRequest', - 'CreateExclusionRequest', - 'CreateLinkRequest', - 'CreateSinkRequest', - 'CreateViewRequest', - 'DeleteBucketRequest', - 'DeleteExclusionRequest', - 'DeleteLinkRequest', - 'DeleteSinkRequest', - 'DeleteViewRequest', - 'GetBucketRequest', - 'GetCmekSettingsRequest', - 'GetExclusionRequest', - 'GetLinkRequest', - 'GetSettingsRequest', - 'GetSinkRequest', - 'GetViewRequest', - 'IndexConfig', - 'Link', - 'LinkMetadata', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'ListLinksRequest', - 'ListLinksResponse', - 'ListSinksRequest', - 'ListSinksResponse', - 'ListViewsRequest', - 'ListViewsResponse', - 'LocationMetadata', - 'LogBucket', - 'LogExclusion', - 'LogSink', - 'LogView', - 'Settings', - 'UndeleteBucketRequest', - 'UpdateBucketRequest', - 'UpdateCmekSettingsRequest', - 'UpdateExclusionRequest', - 'UpdateSettingsRequest', - 'UpdateSinkRequest', - 'UpdateViewRequest', - 'IndexType', - 'LifecycleState', - 'OperationState', - 'CreateLogMetricRequest', - 'DeleteLogMetricRequest', - 'GetLogMetricRequest', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'LogMetric', - 'UpdateLogMetricRequest', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + 
"ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py index 695393863f..8ff2da4023 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py @@ -27,15 +27,17 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogEntry', - 'LogEntryOperation', - 'LogEntrySourceLocation', - 'LogSplit', + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", }, ) +# fmt: on class LogEntry(proto.Message): @@ -249,18 +251,18 @@ class LogEntry(proto.Message): proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, - oneof='payload', + oneof="payload", message=any_pb2.Any, ) text_payload: str = proto.Field( proto.STRING, number=3, - oneof='payload', + oneof="payload", ) json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, - oneof='payload', + oneof="payload", message=struct_pb2.Struct, ) timestamp: timestamp_pb2.Timestamp = proto.Field( @@ -292,10 +294,10 @@ class LogEntry(proto.Message): proto.STRING, number=11, ) - operation: 'LogEntryOperation' = proto.Field( + operation: "LogEntryOperation" = proto.Field( proto.MESSAGE, number=15, - message='LogEntryOperation', + message="LogEntryOperation", ) trace: str = proto.Field( proto.STRING, @@ -309,15 +311,15 @@ class LogEntry(proto.Message): proto.BOOL, number=30, ) - source_location: 'LogEntrySourceLocation' = proto.Field( + source_location: "LogEntrySourceLocation" = proto.Field( proto.MESSAGE, number=23, - message='LogEntrySourceLocation', + message="LogEntrySourceLocation", ) - split: 'LogSplit' = proto.Field( + split: "LogSplit" = proto.Field( proto.MESSAGE, number=35, - message='LogSplit', + message="LogSplit", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py index 5b1dd80ceb..0c2d9735f9 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py @@ -25,23 +25,25 @@ from google.rpc import status_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'DeleteLogRequest', - 'WriteLogEntriesRequest', - 'WriteLogEntriesResponse', - 'WriteLogEntriesPartialErrors', - 'ListLogEntriesRequest', - 'ListLogEntriesResponse', - 
'ListMonitoredResourceDescriptorsRequest', - 'ListMonitoredResourceDescriptorsResponse', - 'ListLogsRequest', - 'ListLogsResponse', - 'TailLogEntriesRequest', - 'TailLogEntriesResponse', + "DeleteLogRequest", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "ListLogsRequest", + "ListLogsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", }, ) +# fmt: on class DeleteLogRequest(proto.Message): @@ -191,8 +193,7 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. - """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -556,6 +557,7 @@ class SuppressionInfo(proto.Message): A lower bound on the count of entries omitted due to ``reason``. """ + class Reason(proto.Enum): r"""An indicator of why entries were omitted. @@ -575,10 +577,10 @@ class Reason(proto.Enum): RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( + reason: "TailLogEntriesResponse.SuppressionInfo.Reason" = proto.Field( proto.ENUM, number=1, - enum='TailLogEntriesResponse.SuppressionInfo.Reason', + enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) suppressed_count: int = proto.Field( proto.INT32, diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py index 292f690774..38e6295c6f 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py @@ -23,64 +23,66 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'OperationState', - 'LifecycleState', - 'IndexType', - 'IndexConfig', - 'LogBucket', - 'LogView', - 'LogSink', - 'BigQueryDataset', - 'Link', - 'BigQueryOptions', - 'ListBucketsRequest', - 'ListBucketsResponse', - 'CreateBucketRequest', - 'UpdateBucketRequest', - 'GetBucketRequest', - 'DeleteBucketRequest', - 'UndeleteBucketRequest', - 'ListViewsRequest', - 'ListViewsResponse', - 'CreateViewRequest', - 'UpdateViewRequest', - 'GetViewRequest', - 'DeleteViewRequest', - 'ListSinksRequest', - 'ListSinksResponse', - 'GetSinkRequest', - 'CreateSinkRequest', - 'UpdateSinkRequest', - 'DeleteSinkRequest', - 'CreateLinkRequest', - 'DeleteLinkRequest', - 'ListLinksRequest', - 'ListLinksResponse', - 'GetLinkRequest', - 'LogExclusion', - 'ListExclusionsRequest', - 'ListExclusionsResponse', - 'GetExclusionRequest', - 'CreateExclusionRequest', - 'UpdateExclusionRequest', - 'DeleteExclusionRequest', - 'GetCmekSettingsRequest', - 'UpdateCmekSettingsRequest', - 'CmekSettings', - 'GetSettingsRequest', - 'UpdateSettingsRequest', - 'Settings', - 'CopyLogEntriesRequest', - 'CopyLogEntriesMetadata', - 'CopyLogEntriesResponse', - 'BucketMetadata', - 'LinkMetadata', - 'LocationMetadata', + "OperationState", + "LifecycleState", + "IndexType", + "IndexConfig", + "LogBucket", + "LogView", + "LogSink", + "BigQueryDataset", + "Link", + "BigQueryOptions", + "ListBucketsRequest", + "ListBucketsResponse", + "CreateBucketRequest", + "UpdateBucketRequest", + 
"GetBucketRequest", + "DeleteBucketRequest", + "UndeleteBucketRequest", + "ListViewsRequest", + "ListViewsResponse", + "CreateViewRequest", + "UpdateViewRequest", + "GetViewRequest", + "DeleteViewRequest", + "ListSinksRequest", + "ListSinksResponse", + "GetSinkRequest", + "CreateSinkRequest", + "UpdateSinkRequest", + "DeleteSinkRequest", + "CreateLinkRequest", + "DeleteLinkRequest", + "ListLinksRequest", + "ListLinksResponse", + "GetLinkRequest", + "LogExclusion", + "ListExclusionsRequest", + "ListExclusionsResponse", + "GetExclusionRequest", + "CreateExclusionRequest", + "UpdateExclusionRequest", + "DeleteExclusionRequest", + "GetCmekSettingsRequest", + "UpdateCmekSettingsRequest", + "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", + "BucketMetadata", + "LinkMetadata", + "LocationMetadata", }, ) +# fmt: on class OperationState(proto.Enum): @@ -191,10 +193,10 @@ class IndexConfig(proto.Message): proto.STRING, number=1, ) - type_: 'IndexType' = proto.Field( + type_: "IndexType" = proto.Field( proto.ENUM, number=2, - enum='IndexType', + enum="IndexType", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -300,10 +302,10 @@ class LogBucket(proto.Message): proto.BOOL, number=9, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=12, - enum='LifecycleState', + enum="LifecycleState", ) analytics_enabled: bool = proto.Field( proto.BOOL, @@ -313,15 +315,15 @@ class LogBucket(proto.Message): proto.STRING, number=15, ) - index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + index_configs: MutableSequence["IndexConfig"] = proto.RepeatedField( proto.MESSAGE, number=17, - message='IndexConfig', + message="IndexConfig", ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=19, - message='CmekSettings', + message="CmekSettings", ) @@ -500,6 +502,7 @@ class LogSink(proto.Message): sink. This field may not be present for older sinks. """ + class VersionFormat(proto.Enum): r"""Deprecated. This is unused. 
@@ -536,10 +539,10 @@ class VersionFormat(proto.Enum): proto.BOOL, number=19, ) - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=16, - message='LogExclusion', + message="LogExclusion", ) output_version_format: VersionFormat = proto.Field( proto.ENUM, @@ -554,11 +557,11 @@ class VersionFormat(proto.Enum): proto.BOOL, number=9, ) - bigquery_options: 'BigQueryOptions' = proto.Field( + bigquery_options: "BigQueryOptions" = proto.Field( proto.MESSAGE, number=12, - oneof='options', - message='BigQueryOptions', + oneof="options", + message="BigQueryOptions", ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, @@ -644,15 +647,15 @@ class Link(proto.Message): number=3, message=timestamp_pb2.Timestamp, ) - lifecycle_state: 'LifecycleState' = proto.Field( + lifecycle_state: "LifecycleState" = proto.Field( proto.ENUM, number=4, - enum='LifecycleState', + enum="LifecycleState", ) - bigquery_dataset: 'BigQueryDataset' = proto.Field( + bigquery_dataset: "BigQueryDataset" = proto.Field( proto.MESSAGE, number=5, - message='BigQueryDataset', + message="BigQueryDataset", ) @@ -755,10 +758,10 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets: MutableSequence['LogBucket'] = proto.RepeatedField( + buckets: MutableSequence["LogBucket"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogBucket', + message="LogBucket", ) next_page_token: str = proto.Field( proto.STRING, @@ -800,10 +803,10 @@ class CreateBucketRequest(proto.Message): proto.STRING, number=2, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=3, - message='LogBucket', + message="LogBucket", ) @@ -842,10 +845,10 @@ class UpdateBucketRequest(proto.Message): proto.STRING, number=1, ) - bucket: 'LogBucket' = proto.Field( + bucket: "LogBucket" = proto.Field( proto.MESSAGE, number=2, - message='LogBucket', + message="LogBucket", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -985,10 +988,10 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views: MutableSequence['LogView'] = proto.RepeatedField( + views: MutableSequence["LogView"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogView', + message="LogView", ) next_page_token: str = proto.Field( proto.STRING, @@ -1027,10 +1030,10 @@ class CreateViewRequest(proto.Message): proto.STRING, number=2, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=3, - message='LogView', + message="LogView", ) @@ -1066,10 +1069,10 @@ class UpdateViewRequest(proto.Message): proto.STRING, number=1, ) - view: 'LogView' = proto.Field( + view: "LogView" = proto.Field( proto.MESSAGE, number=2, - message='LogView', + message="LogView", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1181,10 +1184,10 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks: MutableSequence['LogSink'] = proto.RepeatedField( + sinks: MutableSequence["LogSink"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogSink', + message="LogSink", ) next_page_token: str = proto.Field( proto.STRING, @@ -1259,10 +1262,10 @@ class CreateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ 
-1331,10 +1334,10 @@ class UpdateSinkRequest(proto.Message): proto.STRING, number=1, ) - sink: 'LogSink' = proto.Field( + sink: "LogSink" = proto.Field( proto.MESSAGE, number=2, - message='LogSink', + message="LogSink", ) unique_writer_identity: bool = proto.Field( proto.BOOL, @@ -1399,10 +1402,10 @@ class CreateLinkRequest(proto.Message): proto.STRING, number=1, ) - link: 'Link' = proto.Field( + link: "Link" = proto.Field( proto.MESSAGE, number=2, - message='Link', + message="Link", ) link_id: str = proto.Field( proto.STRING, @@ -1481,10 +1484,10 @@ class ListLinksResponse(proto.Message): def raw_page(self): return self - links: MutableSequence['Link'] = proto.RepeatedField( + links: MutableSequence["Link"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Link', + message="Link", ) next_page_token: str = proto.Field( proto.STRING, @@ -1643,10 +1646,10 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + exclusions: MutableSequence["LogExclusion"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogExclusion', + message="LogExclusion", ) next_page_token: str = proto.Field( proto.STRING, @@ -1708,10 +1711,10 @@ class CreateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) @@ -1752,10 +1755,10 @@ class UpdateExclusionRequest(proto.Message): proto.STRING, number=1, ) - exclusion: 'LogExclusion' = proto.Field( + exclusion: "LogExclusion" = proto.Field( proto.MESSAGE, number=2, - message='LogExclusion', + message="LogExclusion", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -1874,10 +1877,10 @@ class UpdateCmekSettingsRequest(proto.Message): proto.STRING, number=1, ) - cmek_settings: 'CmekSettings' = proto.Field( + cmek_settings: "CmekSettings" = proto.Field( proto.MESSAGE, number=2, - message='CmekSettings', + message="CmekSettings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2073,10 +2076,10 @@ class UpdateSettingsRequest(proto.Message): proto.STRING, number=1, ) - settings: 'Settings' = proto.Field( + settings: "Settings" = proto.Field( proto.MESSAGE, number=2, - message='Settings', + message="Settings", ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, @@ -2249,19 +2252,19 @@ class CopyLogEntriesMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) cancellation_requested: bool = proto.Field( proto.BOOL, number=4, ) - request: 'CopyLogEntriesRequest' = proto.Field( + request: "CopyLogEntriesRequest" = proto.Field( proto.MESSAGE, number=5, - message='CopyLogEntriesRequest', + message="CopyLogEntriesRequest", ) progress: int = proto.Field( proto.INT32, @@ -2324,22 +2327,22 @@ class BucketMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_bucket_request: 'CreateBucketRequest' = proto.Field( + create_bucket_request: "CreateBucketRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateBucketRequest', + oneof="request", + message="CreateBucketRequest", ) - 
update_bucket_request: 'UpdateBucketRequest' = proto.Field( + update_bucket_request: "UpdateBucketRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='UpdateBucketRequest', + oneof="request", + message="UpdateBucketRequest", ) @@ -2380,22 +2383,22 @@ class LinkMetadata(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) - state: 'OperationState' = proto.Field( + state: "OperationState" = proto.Field( proto.ENUM, number=3, - enum='OperationState', + enum="OperationState", ) - create_link_request: 'CreateLinkRequest' = proto.Field( + create_link_request: "CreateLinkRequest" = proto.Field( proto.MESSAGE, number=4, - oneof='request', - message='CreateLinkRequest', + oneof="request", + message="CreateLinkRequest", ) - delete_link_request: 'DeleteLinkRequest' = proto.Field( + delete_link_request: "DeleteLinkRequest" = proto.Field( proto.MESSAGE, number=5, - oneof='request', - message='DeleteLinkRequest', + oneof="request", + message="DeleteLinkRequest", ) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py index d26267d8cf..97874d0eb6 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py @@ -24,18 +24,20 @@ from google.protobuf import timestamp_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.logging.v2', + package="google.logging.v2", manifest={ - 'LogMetric', - 'ListLogMetricsRequest', - 'ListLogMetricsResponse', - 'GetLogMetricRequest', - 'CreateLogMetricRequest', - 'UpdateLogMetricRequest', - 'DeleteLogMetricRequest', + "LogMetric", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "GetLogMetricRequest", + "CreateLogMetricRequest", + "UpdateLogMetricRequest", + "DeleteLogMetricRequest", }, ) +# fmt: on class LogMetric(proto.Message): @@ -180,6 +182,7 @@ class LogMetric(proto.Message): updated this metric. The v2 format is used by default and cannot be changed. """ + class ApiVersion(proto.Enum): r"""Logging API version. 
@@ -302,10 +305,10 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics: MutableSequence['LogMetric'] = proto.RepeatedField( + metrics: MutableSequence["LogMetric"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='LogMetric', + message="LogMetric", ) next_page_token: str = proto.Field( proto.STRING, @@ -353,10 +356,10 @@ class CreateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) @@ -383,10 +386,10 @@ class UpdateLogMetricRequest(proto.Message): proto.STRING, number=1, ) - metric: 'LogMetric' = proto.Field( + metric: "LogMetric" = proto.Field( proto.MESSAGE, number=2, - message='LogMetric', + message="LogMetric", ) diff --git a/tests/integration/goldens/logging_internal/noxfile.py b/tests/integration/goldens/logging_internal/noxfile.py index cafd945c0f..70216a7e57 100755 --- a/tests/integration/goldens/logging_internal/noxfile.py +++ b/tests/integration/goldens/logging_internal/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git a/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 646ec1476b..a5a5485915 100755 --- a/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -44,7 +44,7 @@ async def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index aab2284789..04ca57f4fd 100755 --- a/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -44,7 +44,7 @@ def sample_tail_log_entries(): ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # "logging_v2.TailLogEntriesRequest" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index ca28dea89c..1b92bb71e9 100755 --- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -964,7 +964,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client.list_buckets(request) @@ -976,7 +976,7 @@ def test_list_buckets(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test_list_buckets_non_empty_request_with_auto_populated_field(): @@ -991,8 +991,8 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1004,8 +1004,8 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_buckets_use_cached_wrapped_rpc(): @@ -1091,7 +1091,7 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client.list_buckets(request) @@ -1103,7 +1103,7 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1119,7 +1119,7 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1151,7 +1151,7 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1187,7 +1187,7 @@ def test_list_buckets_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1195,7 +1195,7 @@ def test_list_buckets_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1209,7 +1209,7 @@ def test_list_buckets_flattened_error(): with pytest.raises(ValueError): client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1229,7 +1229,7 @@ async def test_list_buckets_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_buckets( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1237,7 +1237,7 @@ async def test_list_buckets_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1251,7 +1251,7 @@ async def test_list_buckets_flattened_error_async(): with pytest.raises(ValueError): await client.list_buckets( logging_config.ListBucketsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1471,13 +1471,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1489,13 +1489,13 @@ def test_get_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_non_empty_request_with_auto_populated_field(): @@ -1510,7 +1510,7 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1522,7 +1522,7 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest( - name='name_value', + name="name_value", ) def test_get_bucket_use_cached_wrapped_rpc(): @@ -1608,13 +1608,13 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.get_bucket(request) @@ -1626,13 +1626,13 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1648,7 +1648,7 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1680,7 +1680,7 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1746,8 +1746,8 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1759,8 +1759,8 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) def test_create_bucket_async_use_cached_wrapped_rpc(): @@ -1883,7 +1883,7 @@ def test_create_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1915,7 +1915,7 @@ async def test_create_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1981,7 +1981,7 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1993,7 +1993,7 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) def test_update_bucket_async_use_cached_wrapped_rpc(): @@ -2116,7 +2116,7 @@ def test_update_bucket_async_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2148,7 +2148,7 @@ async def test_update_bucket_async_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2190,13 +2190,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -2208,13 +2208,13 @@ def test_create_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_non_empty_request_with_auto_populated_field(): @@ -2229,8 +2229,8 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2242,8 +2242,8 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest( - parent='parent_value', - bucket_id='bucket_id_value', + parent="parent_value", + bucket_id="bucket_id_value", ) def test_create_bucket_use_cached_wrapped_rpc(): @@ -2329,13 +2329,13 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.create_bucket(request) @@ -2347,13 +2347,13 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -2369,7 +2369,7 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2401,7 +2401,7 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2443,13 +2443,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -2461,13 +2461,13 @@ def test_update_bucket(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_non_empty_request_with_auto_populated_field(): @@ -2482,7 +2482,7 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2494,7 +2494,7 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest( - name='name_value', + name="name_value", ) def test_update_bucket_use_cached_wrapped_rpc(): @@ -2580,13 +2580,13 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', + name="name_value", + description="description_value", retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, analytics_enabled=True, - restricted_fields=['restricted_fields_value'], + restricted_fields=["restricted_fields_value"], )) response = await client.update_bucket(request) @@ -2598,13 +2598,13 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' + assert response.name == "name_value" + assert response.description == "description_value" assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE assert response.analytics_enabled is True - assert response.restricted_fields == ['restricted_fields_value'] + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -2620,7 +2620,7 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2652,7 +2652,7 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2718,7 +2718,7 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
request = logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2730,7 +2730,7 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest( - name='name_value', + name="name_value", ) def test_delete_bucket_use_cached_wrapped_rpc(): @@ -2841,7 +2841,7 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2873,7 +2873,7 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2939,7 +2939,7 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2951,7 +2951,7 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest( - name='name_value', + name="name_value", ) def test_undelete_bucket_use_cached_wrapped_rpc(): @@ -3062,7 +3062,7 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3094,7 +3094,7 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3136,7 +3136,7 @@ def test__list_views(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_views(request) @@ -3148,7 +3148,7 @@ def test__list_views(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_views_non_empty_request_with_auto_populated_field(): @@ -3163,8 +3163,8 @@ def test__list_views_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3176,8 +3176,8 @@ def test__list_views_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test__list_views_use_cached_wrapped_rpc(): @@ -3263,7 +3263,7 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=l '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client._list_views(request) @@ -3275,7 +3275,7 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -3291,7 +3291,7 @@ def test__list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3323,7 +3323,7 @@ async def test__list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3359,7 +3359,7 @@ def test__list_views_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3367,7 +3367,7 @@ def test__list_views_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -3381,7 +3381,7 @@ def test__list_views_flattened_error(): with pytest.raises(ValueError): client._list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -3401,7 +3401,7 @@ async def test__list_views_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client._list_views( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3409,7 +3409,7 @@ async def test__list_views_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -3423,7 +3423,7 @@ async def test__list_views_flattened_error_async(): with pytest.raises(ValueError): await client._list_views( logging_config.ListViewsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3643,9 +3643,9 @@ def test__get_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._get_view(request) @@ -3657,9 +3657,9 @@ def test__get_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__get_view_non_empty_request_with_auto_populated_field(): @@ -3674,7 +3674,7 @@ def test__get_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.GetViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3686,7 +3686,7 @@ def test__get_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest( - name='name_value', + name="name_value", ) def test__get_view_use_cached_wrapped_rpc(): @@ -3772,9 +3772,9 @@ async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=log '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client._get_view(request) @@ -3786,9 +3786,9 @@ async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -3804,7 +3804,7 @@ def test__get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3836,7 +3836,7 @@ async def test__get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3878,9 +3878,9 @@ def test__create_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._create_view(request) @@ -3892,9 +3892,9 @@ def test__create_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__create_view_non_empty_request_with_auto_populated_field(): @@ -3909,8 +3909,8 @@ def test__create_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3922,8 +3922,8 @@ def test__create_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest( - parent='parent_value', - view_id='view_id_value', + parent="parent_value", + view_id="view_id_value", ) def test__create_view_use_cached_wrapped_rpc(): @@ -4009,9 +4009,9 @@ async def test__create_view_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client._create_view(request) @@ -4023,9 +4023,9 @@ async def test__create_view_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -4041,7 +4041,7 @@ def test__create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4073,7 +4073,7 @@ async def test__create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4115,9 +4115,9 @@ def test__update_view(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", ) response = client._update_view(request) @@ -4129,9 +4129,9 @@ def test__update_view(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" def test__update_view_non_empty_request_with_auto_populated_field(): @@ -4146,7 +4146,7 @@ def test__update_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4158,7 +4158,7 @@ def test__update_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest( - name='name_value', + name="name_value", ) def test__update_view_use_cached_wrapped_rpc(): @@ -4244,9 +4244,9 @@ async def test__update_view_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', + name="name_value", + description="description_value", + filter="filter_value", )) response = await client._update_view(request) @@ -4258,9 +4258,9 @@ async def test__update_view_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" @pytest.mark.asyncio @@ -4276,7 +4276,7 @@ def test__update_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4308,7 +4308,7 @@ async def test__update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4374,7 +4374,7 @@ def test__delete_view_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4386,7 +4386,7 @@ def test__delete_view_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest( - name='name_value', + name="name_value", ) def test__delete_view_use_cached_wrapped_rpc(): @@ -4497,7 +4497,7 @@ def test__delete_view_field_headers(): # a field header. Set these to a non-empty value. 
     request = logging_config.DeleteViewRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4529,7 +4529,7 @@ async def test__delete_view_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteViewRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4571,7 +4571,7 @@ def test__list_sinks(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.ListSinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         )
         response = client._list_sinks(request)
 
@@ -4583,7 +4583,7 @@ def test__list_sinks(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListSinksPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 def test__list_sinks_non_empty_request_with_auto_populated_field():
@@ -4598,8 +4598,8 @@ def test__list_sinks_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.ListSinksRequest(
-        parent='parent_value',
-        page_token='page_token_value',
+        parent="parent_value",
+        page_token="page_token_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4611,8 +4611,8 @@ def test__list_sinks_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.ListSinksRequest(
-            parent='parent_value',
-            page_token='page_token_value',
+            parent="parent_value",
+            page_token="page_token_value",
         )
 
 def test__list_sinks_use_cached_wrapped_rpc():
@@ -4698,7 +4698,7 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         response = await client._list_sinks(request)
 
@@ -4710,7 +4710,7 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListSinksAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 @pytest.mark.asyncio
@@ -4726,7 +4726,7 @@ def test__list_sinks_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListSinksRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4758,7 +4758,7 @@ async def test__list_sinks_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListSinksRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4794,7 +4794,7 @@ def test__list_sinks_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._list_sinks(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4802,7 +4802,7 @@ def test__list_sinks_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -4816,7 +4816,7 @@ def test__list_sinks_flattened_error():
     with pytest.raises(ValueError):
         client._list_sinks(
             logging_config.ListSinksRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -4836,7 +4836,7 @@ async def test__list_sinks_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
        response = await client._list_sinks(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -4844,7 +4844,7 @@ async def test__list_sinks_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -4858,7 +4858,7 @@ async def test__list_sinks_flattened_error_async():
     with pytest.raises(ValueError):
         await client._list_sinks(
             logging_config.ListSinksRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 
@@ -5078,13 +5078,13 @@ def test__get_sink(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         )
         response = client._get_sink(request)
@@ -5097,13 +5097,13 @@ def test__get_sink(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5119,7 +5119,7 @@ def test__get_sink_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.GetSinkRequest(
-        sink_name='sink_name_value',
+        sink_name="sink_name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5131,7 +5131,7 @@ def test__get_sink_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.GetSinkRequest(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 def test__get_sink_use_cached_wrapped_rpc():
@@ -5217,13 +5217,13 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         response = await client._get_sink(request)
@@ -5236,13 +5236,13 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5259,7 +5259,7 @@ def test__get_sink_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5291,7 +5291,7 @@ async def test__get_sink_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5327,7 +5327,7 @@ def test__get_sink_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._get_sink(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5335,7 +5335,7 @@ def test__get_sink_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
 
 
@@ -5349,7 +5349,7 @@ def test__get_sink_flattened_error():
     with pytest.raises(ValueError):
         client._get_sink(
             logging_config.GetSinkRequest(),
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 @pytest.mark.asyncio
@@ -5369,7 +5369,7 @@ async def test__get_sink_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._get_sink(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -5377,7 +5377,7 @@ async def test__get_sink_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -5391,7 +5391,7 @@ async def test__get_sink_flattened_error_async():
     with pytest.raises(ValueError):
         await client._get_sink(
             logging_config.GetSinkRequest(),
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 
@@ -5415,13 +5415,13 @@ def test__create_sink(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         )
         response = client._create_sink(request)
@@ -5434,13 +5434,13 @@ def test__create_sink(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5456,7 +5456,7 @@ def test__create_sink_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.CreateSinkRequest(
-        parent='parent_value',
+        parent="parent_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5468,7 +5468,7 @@ def test__create_sink_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.CreateSinkRequest(
-            parent='parent_value',
+            parent="parent_value",
         )
 
 def test__create_sink_use_cached_wrapped_rpc():
@@ -5554,13 +5554,13 @@ async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type=
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         response = await client._create_sink(request)
@@ -5573,13 +5573,13 @@ async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type=
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5596,7 +5596,7 @@ def test__create_sink_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateSinkRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5628,7 +5628,7 @@ async def test__create_sink_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateSinkRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5664,8 +5664,8 @@ def test__create_sink_flattened():
         # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
         client._create_sink(
-            parent='parent_value',
-            sink=logging_config.LogSink(name='name_value'),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
         )
 
         # Establish that the underlying call was made with the expected
@@ -5673,10 +5673,10 @@ def test__create_sink_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].sink
-        mock_val = logging_config.LogSink(name='name_value')
+        mock_val = logging_config.LogSink(name="name_value")
         assert arg == mock_val
 
 
@@ -5690,8 +5690,8 @@ def test__create_sink_flattened_error():
     with pytest.raises(ValueError):
         client._create_sink(
             logging_config.CreateSinkRequest(),
-            parent='parent_value',
-            sink=logging_config.LogSink(name='name_value'),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
         )
 
 @pytest.mark.asyncio
@@ -5711,8 +5711,8 @@ async def test__create_sink_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._create_sink(
-            parent='parent_value',
-            sink=logging_config.LogSink(name='name_value'),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
         )
 
         # Establish that the underlying call was made with the expected
@@ -5720,10 +5720,10 @@ async def test__create_sink_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].sink
-        mock_val = logging_config.LogSink(name='name_value')
+        mock_val = logging_config.LogSink(name="name_value")
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -5737,8 +5737,8 @@ async def test__create_sink_flattened_error_async():
     with pytest.raises(ValueError):
         await client._create_sink(
             logging_config.CreateSinkRequest(),
-            parent='parent_value',
-            sink=logging_config.LogSink(name='name_value'),
+            parent="parent_value",
+            sink=logging_config.LogSink(name="name_value"),
         )
 
 
@@ -5762,13 +5762,13 @@ def test__update_sink(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         )
         response = client._update_sink(request)
@@ -5781,13 +5781,13 @@ def test__update_sink(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5803,7 +5803,7 @@ def test__update_sink_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.UpdateSinkRequest(
-        sink_name='sink_name_value',
+        sink_name="sink_name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -5815,7 +5815,7 @@ def test__update_sink_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.UpdateSinkRequest(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 def test__update_sink_use_cached_wrapped_rpc():
@@ -5901,13 +5901,13 @@ async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type=
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         response = await client._update_sink(request)
@@ -5920,13 +5920,13 @@ async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type=
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogSink)
-    assert response.name == 'name_value'
-    assert response.destination == 'destination_value'
-    assert response.filter == 'filter_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.destination == "destination_value"
+    assert response.filter == "filter_value"
+    assert response.description == "description_value"
     assert response.disabled is True
     assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
-    assert response.writer_identity == 'writer_identity_value'
+    assert response.writer_identity == "writer_identity_value"
     assert response.include_children is True
 
 
@@ -5943,7 +5943,7 @@ def test__update_sink_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -5975,7 +5975,7 @@ async def test__update_sink_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6011,9 +6011,9 @@ def test__update_sink_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._update_sink(
-            sink_name='sink_name_value',
-            sink=logging_config.LogSink(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -6021,13 +6021,13 @@ def test__update_sink_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
         arg = args[0].sink
-        mock_val = logging_config.LogSink(name='name_value')
+        mock_val = logging_config.LogSink(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 
@@ -6041,9 +6041,9 @@ def test__update_sink_flattened_error():
     with pytest.raises(ValueError):
         client._update_sink(
             logging_config.UpdateSinkRequest(),
-            sink_name='sink_name_value',
-            sink=logging_config.LogSink(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 @pytest.mark.asyncio
@@ -6063,9 +6063,9 @@ async def test__update_sink_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._update_sink(
-            sink_name='sink_name_value',
-            sink=logging_config.LogSink(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -6073,13 +6073,13 @@ async def test__update_sink_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
         arg = args[0].sink
-        mock_val = logging_config.LogSink(name='name_value')
+        mock_val = logging_config.LogSink(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6093,9 +6093,9 @@ async def test__update_sink_flattened_error_async():
     with pytest.raises(ValueError):
         await client._update_sink(
             logging_config.UpdateSinkRequest(),
-            sink_name='sink_name_value',
-            sink=logging_config.LogSink(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            sink_name="sink_name_value",
+            sink=logging_config.LogSink(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 
@@ -6143,7 +6143,7 @@ def test__delete_sink_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.DeleteSinkRequest(
-        sink_name='sink_name_value',
+        sink_name="sink_name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -6155,7 +6155,7 @@ def test__delete_sink_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.DeleteSinkRequest(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 def test__delete_sink_use_cached_wrapped_rpc():
@@ -6266,7 +6266,7 @@ def test__delete_sink_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6298,7 +6298,7 @@ async def test__delete_sink_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteSinkRequest()
 
-    request.sink_name = 'sink_name_value'
+    request.sink_name = "sink_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6334,7 +6334,7 @@ def test__delete_sink_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._delete_sink(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6342,7 +6342,7 @@ def test__delete_sink_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
 
 
@@ -6356,7 +6356,7 @@ def test__delete_sink_flattened_error():
     with pytest.raises(ValueError):
         client._delete_sink(
             logging_config.DeleteSinkRequest(),
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 @pytest.mark.asyncio
@@ -6376,7 +6376,7 @@ async def test__delete_sink_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._delete_sink(
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6384,7 +6384,7 @@ async def test__delete_sink_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].sink_name
-        mock_val = 'sink_name_value'
+        mock_val = "sink_name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6398,7 +6398,7 @@ async def test__delete_sink_flattened_error_async():
     with pytest.raises(ValueError):
         await client._delete_sink(
             logging_config.DeleteSinkRequest(),
-            sink_name='sink_name_value',
+            sink_name="sink_name_value",
         )
 
 
@@ -6446,8 +6446,8 @@ def test__create_link_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.CreateLinkRequest(
-        parent='parent_value',
-        link_id='link_id_value',
+        parent="parent_value",
+        link_id="link_id_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -6459,8 +6459,8 @@ def test__create_link_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.CreateLinkRequest(
-            parent='parent_value',
-            link_id='link_id_value',
+            parent="parent_value",
+            link_id="link_id_value",
         )
 
 def test__create_link_use_cached_wrapped_rpc():
@@ -6583,7 +6583,7 @@ def test__create_link_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateLinkRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6615,7 +6615,7 @@ async def test__create_link_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateLinkRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6651,9 +6651,9 @@ def test__create_link_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._create_link(
-            parent='parent_value',
-            link=logging_config.Link(name='name_value'),
-            link_id='link_id_value',
+            parent="parent_value",
+            link=logging_config.Link(name="name_value"),
+            link_id="link_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6661,13 +6661,13 @@ def test__create_link_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].link
-        mock_val = logging_config.Link(name='name_value')
+        mock_val = logging_config.Link(name="name_value")
         assert arg == mock_val
         arg = args[0].link_id
-        mock_val = 'link_id_value'
+        mock_val = "link_id_value"
         assert arg == mock_val
 
 
@@ -6681,9 +6681,9 @@ def test__create_link_flattened_error():
     with pytest.raises(ValueError):
         client._create_link(
             logging_config.CreateLinkRequest(),
-            parent='parent_value',
-            link=logging_config.Link(name='name_value'),
-            link_id='link_id_value',
+            parent="parent_value",
+            link=logging_config.Link(name="name_value"),
+            link_id="link_id_value",
         )
 
 @pytest.mark.asyncio
@@ -6705,9 +6705,9 @@ async def test__create_link_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._create_link(
-            parent='parent_value',
-            link=logging_config.Link(name='name_value'),
-            link_id='link_id_value',
+            parent="parent_value",
+            link=logging_config.Link(name="name_value"),
+            link_id="link_id_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6715,13 +6715,13 @@ async def test__create_link_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].link
-        mock_val = logging_config.Link(name='name_value')
+        mock_val = logging_config.Link(name="name_value")
         assert arg == mock_val
         arg = args[0].link_id
-        mock_val = 'link_id_value'
+        mock_val = "link_id_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -6735,9 +6735,9 @@ async def test__create_link_flattened_error_async():
     with pytest.raises(ValueError):
         await client._create_link(
             logging_config.CreateLinkRequest(),
-            parent='parent_value',
-            link=logging_config.Link(name='name_value'),
-            link_id='link_id_value',
+            parent="parent_value",
+            link=logging_config.Link(name="name_value"),
+            link_id="link_id_value",
         )
 
 
@@ -6785,7 +6785,7 @@ def test__delete_link_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.DeleteLinkRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -6797,7 +6797,7 @@ def test__delete_link_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.DeleteLinkRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__delete_link_use_cached_wrapped_rpc():
@@ -6920,7 +6920,7 @@ def test__delete_link_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteLinkRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6952,7 +6952,7 @@ async def test__delete_link_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteLinkRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -6988,7 +6988,7 @@ def test__delete_link_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._delete_link(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -6996,7 +6996,7 @@ def test__delete_link_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -7010,7 +7010,7 @@ def test__delete_link_flattened_error():
     with pytest.raises(ValueError):
         client._delete_link(
             logging_config.DeleteLinkRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -7032,7 +7032,7 @@ async def test__delete_link_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._delete_link(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7040,7 +7040,7 @@ async def test__delete_link_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -7054,7 +7054,7 @@ async def test__delete_link_flattened_error_async():
     with pytest.raises(ValueError):
         await client._delete_link(
             logging_config.DeleteLinkRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 
@@ -7078,7 +7078,7 @@ def test__list_links(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.ListLinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         )
         response = client._list_links(request)
 
@@ -7090,7 +7090,7 @@ def test__list_links(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLinksPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 def test__list_links_non_empty_request_with_auto_populated_field():
@@ -7105,8 +7105,8 @@ def test__list_links_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.ListLinksRequest(
-        parent='parent_value',
-        page_token='page_token_value',
+        parent="parent_value",
+        page_token="page_token_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -7118,8 +7118,8 @@ def test__list_links_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.ListLinksRequest(
-            parent='parent_value',
-            page_token='page_token_value',
+            parent="parent_value",
+            page_token="page_token_value",
         )
 
 def test__list_links_use_cached_wrapped_rpc():
@@ -7205,7 +7205,7 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         response = await client._list_links(request)
 
@@ -7217,7 +7217,7 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLinksAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 @pytest.mark.asyncio
@@ -7233,7 +7233,7 @@ def test__list_links_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListLinksRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7265,7 +7265,7 @@ async def test__list_links_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListLinksRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7301,7 +7301,7 @@ def test__list_links_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._list_links(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7309,7 +7309,7 @@ def test__list_links_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -7323,7 +7323,7 @@ def test__list_links_flattened_error():
     with pytest.raises(ValueError):
         client._list_links(
             logging_config.ListLinksRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -7343,7 +7343,7 @@ async def test__list_links_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._list_links(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7351,7 +7351,7 @@ async def test__list_links_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -7365,7 +7365,7 @@ async def test__list_links_flattened_error_async():
     with pytest.raises(ValueError):
         await client._list_links(
             logging_config.ListLinksRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 
@@ -7585,8 +7585,8 @@ def test__get_link(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.Link(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
         )
         response = client._get_link(request)
@@ -7599,8 +7599,8 @@ def test__get_link(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Link)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
     assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
 
 
@@ -7616,7 +7616,7 @@ def test__get_link_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.GetLinkRequest(
-        name='name_value',
+        name="name_value",
    )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -7628,7 +7628,7 @@ def test__get_link_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.GetLinkRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__get_link_use_cached_wrapped_rpc():
@@ -7714,8 +7714,8 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
         ))
         response = await client._get_link(request)
@@ -7728,8 +7728,8 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Link)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
     assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
 
 
@@ -7746,7 +7746,7 @@ def test__get_link_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetLinkRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7778,7 +7778,7 @@ async def test__get_link_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetLinkRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -7814,7 +7814,7 @@ def test__get_link_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._get_link(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7822,7 +7822,7 @@ def test__get_link_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -7836,7 +7836,7 @@ def test__get_link_flattened_error():
     with pytest.raises(ValueError):
         client._get_link(
             logging_config.GetLinkRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -7856,7 +7856,7 @@ async def test__get_link_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._get_link(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -7864,7 +7864,7 @@ async def test__get_link_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -7878,7 +7878,7 @@ async def test__get_link_flattened_error_async():
     with pytest.raises(ValueError):
         await client._get_link(
             logging_config.GetLinkRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 
@@ -7902,7 +7902,7 @@ def test__list_exclusions(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.ListExclusionsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         )
         response = client._list_exclusions(request)
 
@@ -7914,7 +7914,7 @@ def test__list_exclusions(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListExclusionsPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 def test__list_exclusions_non_empty_request_with_auto_populated_field():
@@ -7929,8 +7929,8 @@ def test__list_exclusions_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.ListExclusionsRequest(
-        parent='parent_value',
-        page_token='page_token_value',
+        parent="parent_value",
+        page_token="page_token_value",
    )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -7942,8 +7942,8 @@ def test__list_exclusions_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.ListExclusionsRequest(
-            parent='parent_value',
-            page_token='page_token_value',
+            parent="parent_value",
+            page_token="page_token_value",
         )
 
 def test__list_exclusions_use_cached_wrapped_rpc():
@@ -8029,7 +8029,7 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         response = await client._list_exclusions(request)
 
@@ -8041,7 +8041,7 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListExclusionsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 @pytest.mark.asyncio
@@ -8057,7 +8057,7 @@ def test__list_exclusions_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListExclusionsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8089,7 +8089,7 @@ async def test__list_exclusions_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.ListExclusionsRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8125,7 +8125,7 @@ def test__list_exclusions_flattened():
         # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
         client._list_exclusions(
-            parent='parent_value',
+            parent="parent_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -8133,7 +8133,7 @@ def test__list_exclusions_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 
@@ -8147,7 +8147,7 @@ def test__list_exclusions_flattened_error():
     with pytest.raises(ValueError):
         client._list_exclusions(
             logging_config.ListExclusionsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 @pytest.mark.asyncio
@@ -8167,7 +8167,7 @@ async def test__list_exclusions_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._list_exclusions(
-            parent='parent_value',
+            parent="parent_value",
        )
 
         # Establish that the underlying call was made with the expected
@@ -8175,7 +8175,7 @@ async def test__list_exclusions_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -8189,7 +8189,7 @@ async def test__list_exclusions_flattened_error_async():
     with pytest.raises(ValueError):
         await client._list_exclusions(
             logging_config.ListExclusionsRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )
 
 
@@ -8409,9 +8409,9 @@ def test__get_exclusion(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         )
         response = client._get_exclusion(request)
@@ -8424,9 +8424,9 @@ def test__get_exclusion(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -8442,7 +8442,7 @@ def test__get_exclusion_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.GetExclusionRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -8454,7 +8454,7 @@ def test__get_exclusion_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.GetExclusionRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__get_exclusion_use_cached_wrapped_rpc():
@@ -8540,9 +8540,9 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         response = await client._get_exclusion(request)
@@ -8555,9 +8555,9 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -8574,7 +8574,7 @@ def test__get_exclusion_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8606,7 +8606,7 @@ async def test__get_exclusion_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8642,7 +8642,7 @@ def test__get_exclusion_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._get_exclusion(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -8650,7 +8650,7 @@ def test__get_exclusion_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -8664,7 +8664,7 @@ def test__get_exclusion_flattened_error():
     with pytest.raises(ValueError):
         client._get_exclusion(
             logging_config.GetExclusionRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -8684,7 +8684,7 @@ async def test__get_exclusion_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._get_exclusion(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -8692,7 +8692,7 @@ async def test__get_exclusion_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -8706,7 +8706,7 @@ async def test__get_exclusion_flattened_error_async():
     with pytest.raises(ValueError):
         await client._get_exclusion(
             logging_config.GetExclusionRequest(),
-            name='name_value',
+            name="name_value",
        )
 
 
@@ -8730,9 +8730,9 @@ def test__create_exclusion(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         )
         response = client._create_exclusion(request)
@@ -8745,9 +8745,9 @@ def test__create_exclusion(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -8763,7 +8763,7 @@ def test__create_exclusion_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.CreateExclusionRequest(
-        parent='parent_value',
+        parent="parent_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -8775,7 +8775,7 @@ def test__create_exclusion_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.CreateExclusionRequest(
-            parent='parent_value',
+            parent="parent_value",
         )
 
 def test__create_exclusion_use_cached_wrapped_rpc():
@@ -8861,9 +8861,9 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         response = await client._create_exclusion(request)
@@ -8876,9 +8876,9 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -8895,7 +8895,7 @@ def test__create_exclusion_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateExclusionRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8927,7 +8927,7 @@ async def test__create_exclusion_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.CreateExclusionRequest()
 
-    request.parent = 'parent_value'
+    request.parent = "parent_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -8963,8 +8963,8 @@ def test__create_exclusion_flattened():
         # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
         client._create_exclusion(
-            parent='parent_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
+            parent="parent_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
         )
 
         # Establish that the underlying call was made with the expected
@@ -8972,10 +8972,10 @@ def test__create_exclusion_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
        assert arg == mock_val
         arg = args[0].exclusion
-        mock_val = logging_config.LogExclusion(name='name_value')
+        mock_val = logging_config.LogExclusion(name="name_value")
         assert arg == mock_val
 
 
@@ -8989,8 +8989,8 @@ def test__create_exclusion_flattened_error():
     with pytest.raises(ValueError):
         client._create_exclusion(
             logging_config.CreateExclusionRequest(),
-            parent='parent_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
+            parent="parent_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
         )
 
 @pytest.mark.asyncio
@@ -9010,8 +9010,8 @@ async def test__create_exclusion_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._create_exclusion(
-            parent='parent_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
+            parent="parent_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
         )
 
         # Establish that the underlying call was made with the expected
@@ -9019,10 +9019,10 @@ async def test__create_exclusion_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].exclusion
-        mock_val = logging_config.LogExclusion(name='name_value')
+        mock_val = logging_config.LogExclusion(name="name_value")
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -9036,8 +9036,8 @@ async def test__create_exclusion_flattened_error_async():
     with pytest.raises(ValueError):
         await client._create_exclusion(
             logging_config.CreateExclusionRequest(),
-            parent='parent_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
+            parent="parent_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
         )
 
 
@@ -9061,9 +9061,9 @@ def test__update_exclusion(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         )
         response = client._update_exclusion(request)
@@ -9076,9 +9076,9 @@ def test__update_exclusion(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -9094,7 +9094,7 @@ def test__update_exclusion_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.UpdateExclusionRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -9106,7 +9106,7 @@ def test__update_exclusion_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.UpdateExclusionRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__update_exclusion_use_cached_wrapped_rpc():
@@ -9192,9 +9192,9 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         response = await client._update_exclusion(request)
@@ -9207,9 +9207,9 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.LogExclusion)
-    assert response.name == 'name_value'
-    assert response.description == 'description_value'
-    assert response.filter == 'filter_value'
+    assert response.name == "name_value"
+    assert response.description == "description_value"
+    assert response.filter == "filter_value"
     assert response.disabled is True
 
 
@@ -9226,7 +9226,7 @@ def test__update_exclusion_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9258,7 +9258,7 @@ async def test__update_exclusion_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9294,9 +9294,9 @@ def test__update_exclusion_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._update_exclusion(
-            name='name_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            name="name_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -9304,13 +9304,13 @@ def test__update_exclusion_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].exclusion
-        mock_val = logging_config.LogExclusion(name='name_value')
+        mock_val = logging_config.LogExclusion(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 
@@ -9324,9 +9324,9 @@ def test__update_exclusion_flattened_error():
     with pytest.raises(ValueError):
         client._update_exclusion(
             logging_config.UpdateExclusionRequest(),
-            name='name_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            name="name_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 @pytest.mark.asyncio
@@ -9346,9 +9346,9 @@ async def test__update_exclusion_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._update_exclusion(
-            name='name_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            name="name_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -9356,13 +9356,13 @@ async def test__update_exclusion_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].exclusion
-        mock_val = logging_config.LogExclusion(name='name_value')
+        mock_val = logging_config.LogExclusion(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -9376,9 +9376,9 @@ async def test__update_exclusion_flattened_error_async():
     with pytest.raises(ValueError):
         await client._update_exclusion(
             logging_config.UpdateExclusionRequest(),
-            name='name_value',
-            exclusion=logging_config.LogExclusion(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            name="name_value",
+            exclusion=logging_config.LogExclusion(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 
@@ -9426,7 +9426,7 @@ def test__delete_exclusion_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.DeleteExclusionRequest(
-        name='name_value',
+        name="name_value",
    )
 
    # Mock the actual call within the gRPC stub, and fake the request.
@@ -9438,7 +9438,7 @@ def test__delete_exclusion_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.DeleteExclusionRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__delete_exclusion_use_cached_wrapped_rpc():
@@ -9549,7 +9549,7 @@ def test__delete_exclusion_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9581,7 +9581,7 @@ async def test__delete_exclusion_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.DeleteExclusionRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9617,7 +9617,7 @@ def test__delete_exclusion_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._delete_exclusion(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -9625,7 +9625,7 @@ def test__delete_exclusion_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -9639,7 +9639,7 @@ def test__delete_exclusion_flattened_error():
     with pytest.raises(ValueError):
         client._delete_exclusion(
             logging_config.DeleteExclusionRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -9659,7 +9659,7 @@ async def test__delete_exclusion_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._delete_exclusion(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -9667,7 +9667,7 @@ async def test__delete_exclusion_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -9681,7 +9681,7 @@ async def test__delete_exclusion_flattened_error_async():
     with pytest.raises(ValueError):
         await client._delete_exclusion(
             logging_config.DeleteExclusionRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 
@@ -9705,10 +9705,10 @@ def test__get_cmek_settings(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         )
         response = client._get_cmek_settings(request)
@@ -9720,10 +9720,10 @@ def test__get_cmek_settings(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_key_version_name == 'kms_key_version_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_key_version_name == "kms_key_version_name_value"
+    assert response.service_account_id == "service_account_id_value"
 
 
 def test__get_cmek_settings_non_empty_request_with_auto_populated_field():
@@ -9738,7 +9738,7 @@ def test__get_cmek_settings_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.GetCmekSettingsRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -9750,7 +9750,7 @@ def test__get_cmek_settings_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.GetCmekSettingsRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__get_cmek_settings_use_cached_wrapped_rpc():
@@ -9836,10 +9836,10 @@ async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         ))
         response = await client._get_cmek_settings(request)
@@ -9851,10 +9851,10 @@ async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_key_version_name == 'kms_key_version_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_key_version_name == "kms_key_version_name_value"
+    assert response.service_account_id == "service_account_id_value"
 
 
 @pytest.mark.asyncio
@@ -9870,7 +9870,7 @@ def test__get_cmek_settings_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetCmekSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9902,7 +9902,7 @@ async def test__get_cmek_settings_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetCmekSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -9944,10 +9944,10 @@ def test__update_cmek_settings(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         )
         response = client._update_cmek_settings(request)
@@ -9959,10 +9959,10 @@ def test__update_cmek_settings(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_key_version_name == 'kms_key_version_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_key_version_name == "kms_key_version_name_value"
+    assert response.service_account_id == "service_account_id_value"
 
 
 def test__update_cmek_settings_non_empty_request_with_auto_populated_field():
@@ -9977,7 +9977,7 @@ def test__update_cmek_settings_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.UpdateCmekSettingsRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -9989,7 +9989,7 @@ def test__update_cmek_settings_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.UpdateCmekSettingsRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__update_cmek_settings_use_cached_wrapped_rpc():
@@ -10075,10 +10075,10 @@ async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', requ
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         ))
         response = await client._update_cmek_settings(request)
@@ -10090,10 +10090,10 @@ async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', requ
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.CmekSettings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_key_version_name == 'kms_key_version_name_value'
-    assert response.service_account_id == 'service_account_id_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_key_version_name == "kms_key_version_name_value"
+    assert response.service_account_id == "service_account_id_value"
 
 
 @pytest.mark.asyncio
@@ -10109,7 +10109,7 @@ def test__update_cmek_settings_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateCmekSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10141,7 +10141,7 @@ async def test__update_cmek_settings_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateCmekSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10183,10 +10183,10 @@ def test__get_settings(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         )
         response = client._get_settings(request)
@@ -10199,10 +10199,10 @@ def test__get_settings(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Settings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_service_account_id == 'kms_service_account_id_value'
-    assert response.storage_location == 'storage_location_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_service_account_id == "kms_service_account_id_value"
+    assert response.storage_location == "storage_location_value"
     assert response.disable_default_sink is True
 
 
@@ -10218,7 +10218,7 @@ def test__get_settings_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.GetSettingsRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -10230,7 +10230,7 @@ def test__get_settings_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.GetSettingsRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__get_settings_use_cached_wrapped_rpc():
@@ -10316,10 +10316,10 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         ))
         response = await client._get_settings(request)
@@ -10332,10 +10332,10 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Settings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_service_account_id == 'kms_service_account_id_value'
-    assert response.storage_location == 'storage_location_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_service_account_id == "kms_service_account_id_value"
+    assert response.storage_location == "storage_location_value"
     assert response.disable_default_sink is True
 
 
@@ -10352,7 +10352,7 @@ def test__get_settings_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10384,7 +10384,7 @@ async def test__get_settings_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.GetSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10420,7 +10420,7 @@ def test__get_settings_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._get_settings(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -10428,7 +10428,7 @@ def test__get_settings_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 
@@ -10442,7 +10442,7 @@ def test__get_settings_flattened_error():
     with pytest.raises(ValueError):
         client._get_settings(
             logging_config.GetSettingsRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 @pytest.mark.asyncio
@@ -10462,7 +10462,7 @@ async def test__get_settings_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._get_settings(
-            name='name_value',
+            name="name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -10470,7 +10470,7 @@ async def test__get_settings_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -10484,7 +10484,7 @@ async def test__get_settings_flattened_error_async():
     with pytest.raises(ValueError):
         await client._get_settings(
             logging_config.GetSettingsRequest(),
-            name='name_value',
+            name="name_value",
         )
 
 
@@ -10508,10 +10508,10 @@ def test__update_settings(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         )
         response = client._update_settings(request)
@@ -10524,10 +10524,10 @@ def test__update_settings(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Settings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_service_account_id == 'kms_service_account_id_value'
-    assert response.storage_location == 'storage_location_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_service_account_id == "kms_service_account_id_value"
+    assert response.storage_location == "storage_location_value"
     assert response.disable_default_sink is True
 
 
@@ -10543,7 +10543,7 @@ def test__update_settings_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging_config.UpdateSettingsRequest(
-        name='name_value',
+        name="name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -10555,7 +10555,7 @@ def test__update_settings_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.UpdateSettingsRequest(
-            name='name_value',
+            name="name_value",
         )
 
 def test__update_settings_use_cached_wrapped_rpc():
@@ -10641,10 +10641,10 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         ))
         response = await client._update_settings(request)
@@ -10657,10 +10657,10 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, logging_config.Settings)
-    assert response.name == 'name_value'
-    assert response.kms_key_name == 'kms_key_name_value'
-    assert response.kms_service_account_id == 'kms_service_account_id_value'
-    assert response.storage_location == 'storage_location_value'
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.kms_service_account_id == "kms_service_account_id_value"
+    assert response.storage_location == "storage_location_value"
     assert response.disable_default_sink is True
 
 
@@ -10677,7 +10677,7 @@ def test__update_settings_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10709,7 +10709,7 @@ async def test__update_settings_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging_config.UpdateSettingsRequest()
 
-    request.name = 'name_value'
+    request.name = "name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -10745,8 +10745,8 @@ def test__update_settings_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client._update_settings(
-            settings=logging_config.Settings(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            settings=logging_config.Settings(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -10754,10 +10754,10 @@ def test__update_settings_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].settings
-        mock_val = logging_config.Settings(name='name_value')
+        mock_val = logging_config.Settings(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 
@@ -10771,8 +10771,8 @@ def test__update_settings_flattened_error():
     with pytest.raises(ValueError):
         client._update_settings(
             logging_config.UpdateSettingsRequest(),
-            settings=logging_config.Settings(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            settings=logging_config.Settings(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 @pytest.mark.asyncio
@@ -10792,8 +10792,8 @@ async def test__update_settings_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client._update_settings(
-            settings=logging_config.Settings(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            settings=logging_config.Settings(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
         # Establish that the underlying call was made with the expected
@@ -10801,10 +10801,10 @@ async def test__update_settings_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].settings
-        mock_val = logging_config.Settings(name='name_value')
+        mock_val = logging_config.Settings(name="name_value")
         assert arg == mock_val
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -10818,8 +10818,8 @@ async def test__update_settings_flattened_error_async():
     with pytest.raises(ValueError):
         await client._update_settings(
             logging_config.UpdateSettingsRequest(),
-            settings=logging_config.Settings(name='name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+            settings=logging_config.Settings(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
         )
 
 
@@ -10867,9 +10867,9 @@ def test__copy_log_entries_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
     request = logging_config.CopyLogEntriesRequest(
-        name='name_value',
-        filter='filter_value',
-        destination='destination_value',
+        name="name_value",
+        filter="filter_value",
+        destination="destination_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -10881,9 +10881,9 @@ def test__copy_log_entries_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging_config.CopyLogEntriesRequest(
-            name='name_value',
-            filter='filter_value',
-            destination='destination_value',
+            name="name_value",
+            filter="filter_value",
+            destination="destination_value",
         )
 
 def test__copy_log_entries_use_cached_wrapped_rpc():
@@ -11865,7 +11865,7 @@ async def test_list_buckets_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         await client.list_buckets(request=None)
 
@@ -11892,13 +11892,13 @@ async def test_get_bucket_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             retention_days=1512,
             locked=True,
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
             analytics_enabled=True,
-            restricted_fields=['restricted_fields_value'],
+            restricted_fields=["restricted_fields_value"],
         ))
         await client.get_bucket(request=None)
 
@@ -11979,13 +11979,13 @@ async def test_create_bucket_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             retention_days=1512,
             locked=True,
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
             analytics_enabled=True,
-            restricted_fields=['restricted_fields_value'],
+            restricted_fields=["restricted_fields_value"],
         ))
         await client.create_bucket(request=None)
 
@@ -12012,13 +12012,13 @@ async def test_update_bucket_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             retention_days=1512,
             locked=True,
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
             analytics_enabled=True,
-            restricted_fields=['restricted_fields_value'],
+            restricted_fields=["restricted_fields_value"],
         ))
         await client.update_bucket(request=None)
 
@@ -12095,7 +12095,7 @@ async def test__list_views_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         await client._list_views(request=None)
 
@@ -12122,9 +12122,9 @@ async def test__get_view_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
         ))
         await client._get_view(request=None)
 
@@ -12151,9 +12151,9 @@ async def test__create_view_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
         ))
         await client._create_view(request=None)
 
@@ -12180,9 +12180,9 @@ async def test__update_view_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
         ))
         await client._update_view(request=None)
 
@@ -12234,7 +12234,7 @@ async def test__list_sinks_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         await client._list_sinks(request=None)
 
@@ -12261,13 +12261,13 @@ async def test__get_sink_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         await client._get_sink(request=None)
 
@@ -12295,13 +12295,13 @@ async def test__create_sink_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         await client._create_sink(request=None)
 
@@ -12329,13 +12329,13 @@ async def test__update_sink_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
-            name='name_value',
-            destination='destination_value',
-            filter='filter_value',
-            description='description_value',
+            name="name_value",
+            destination="destination_value",
+            filter="filter_value",
+            description="description_value",
             disabled=True,
             output_version_format=logging_config.LogSink.VersionFormat.V2,
-            writer_identity='writer_identity_value',
+            writer_identity="writer_identity_value",
             include_children=True,
         ))
         await client._update_sink(request=None)
 
@@ -12442,7 +12442,7 @@ async def test__list_links_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         await client._list_links(request=None)
 
@@ -12469,8 +12469,8 @@ async def test__get_link_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link(
-            name='name_value',
-            description='description_value',
+            name="name_value",
+            description="description_value",
             lifecycle_state=logging_config.LifecycleState.ACTIVE,
         ))
         await client._get_link(request=None)
 
@@ -12498,7 +12498,7 @@ async def test__list_exclusions_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         await client._list_exclusions(request=None)
 
@@ -12525,9 +12525,9 @@ async def test__get_exclusion_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         await client._get_exclusion(request=None)
 
@@ -12555,9 +12555,9 @@ async def test__create_exclusion_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         await client._create_exclusion(request=None)
 
@@ -12585,9 +12585,9 @@ async def test__update_exclusion_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion(
-            name='name_value',
-            description='description_value',
-            filter='filter_value',
+            name="name_value",
+            description="description_value",
+            filter="filter_value",
             disabled=True,
         ))
         await client._update_exclusion(request=None)
 
@@ -12640,10 +12640,10 @@ async def test__get_cmek_settings_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         ))
         await client._get_cmek_settings(request=None)
 
@@ -12670,10 +12670,10 @@ async def test__update_cmek_settings_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_key_version_name='kms_key_version_name_value',
-            service_account_id='service_account_id_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_key_version_name="kms_key_version_name_value",
+            service_account_id="service_account_id_value",
         ))
         await client._update_cmek_settings(request=None)
 
@@ -12700,10 +12700,10 @@ async def test__get_settings_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         ))
         await client._get_settings(request=None)
 
@@ -12731,10 +12731,10 @@ async def test__update_settings_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings(
-            name='name_value',
-            kms_key_name='kms_key_name_value',
-            kms_service_account_id='kms_service_account_id_value',
-            storage_location='storage_location_value',
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            kms_service_account_id="kms_service_account_id_value",
+            storage_location="storage_location_value",
             disable_default_sink=True,
         ))
         await client._update_settings(request=None)
diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py
index 614126cfdb..436e34001f 100755
--- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py
+++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py
@@ -990,7 +990,7 @@ def test_delete_log_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging.DeleteLogRequest(
-        log_name='log_name_value',
+        log_name="log_name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1002,7 +1002,7 @@ def test_delete_log_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging.DeleteLogRequest(
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
 def test_delete_log_use_cached_wrapped_rpc():
@@ -1113,7 +1113,7 @@ def test_delete_log_field_headers():
     # a field header. Set these to a non-empty value.
     request = logging.DeleteLogRequest()
 
-    request.log_name = 'log_name_value'
+    request.log_name = "log_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1145,7 +1145,7 @@ async def test_delete_log_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = logging.DeleteLogRequest()
 
-    request.log_name = 'log_name_value'
+    request.log_name = "log_name_value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1181,7 +1181,7 @@ def test_delete_log_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_log(
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -1189,7 +1189,7 @@ def test_delete_log_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].log_name
-        mock_val = 'log_name_value'
+        mock_val = "log_name_value"
         assert arg == mock_val
 
 
@@ -1203,7 +1203,7 @@ def test_delete_log_flattened_error():
     with pytest.raises(ValueError):
         client.delete_log(
             logging.DeleteLogRequest(),
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
 @pytest.mark.asyncio
@@ -1223,7 +1223,7 @@ async def test_delete_log_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.delete_log(
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -1231,7 +1231,7 @@ async def test_delete_log_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].log_name
-        mock_val = 'log_name_value'
+        mock_val = "log_name_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -1245,7 +1245,7 @@ async def test_delete_log_flattened_error_async():
     with pytest.raises(ValueError):
         await client.delete_log(
             logging.DeleteLogRequest(),
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
 
@@ -1294,7 +1294,7 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging.WriteLogEntriesRequest(
-        log_name='log_name_value',
+        log_name="log_name_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1306,7 +1306,7 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging.WriteLogEntriesRequest(
-            log_name='log_name_value',
+            log_name="log_name_value",
         )
 
 def test_write_log_entries_use_cached_wrapped_rpc():
@@ -1424,10 +1424,10 @@ def test_write_log_entries_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.write_log_entries(
-            log_name='log_name_value',
-            resource=monitored_resource_pb2.MonitoredResource(type='type_value'),
-            labels={'key_value': 'value_value'},
-            entries=[log_entry.LogEntry(log_name='log_name_value')],
+            log_name="log_name_value",
+            resource=monitored_resource_pb2.MonitoredResource(type="type_value"),
+            labels={"key_value": "value_value"},
+            entries=[log_entry.LogEntry(log_name="log_name_value")],
         )
 
         # Establish that the underlying call was made with the expected
@@ -1435,16 +1435,16 @@ def test_write_log_entries_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].log_name
-        mock_val = 'log_name_value'
+        mock_val = "log_name_value"
         assert arg == mock_val
         arg = args[0].resource
-        mock_val = monitored_resource_pb2.MonitoredResource(type='type_value')
+        mock_val = monitored_resource_pb2.MonitoredResource(type="type_value")
         assert arg == mock_val
         arg = args[0].labels
-        mock_val = {'key_value': 'value_value'}
+        mock_val = {"key_value": "value_value"}
         assert arg == mock_val
         arg = args[0].entries
-        mock_val = [log_entry.LogEntry(log_name='log_name_value')]
+        mock_val = [log_entry.LogEntry(log_name="log_name_value")]
         assert arg == mock_val
 
 
@@ -1458,10 +1458,10 @@ def test_write_log_entries_flattened_error():
     with pytest.raises(ValueError):
         client.write_log_entries(
             logging.WriteLogEntriesRequest(),
-            log_name='log_name_value',
-            resource=monitored_resource_pb2.MonitoredResource(type='type_value'),
-            labels={'key_value': 'value_value'},
-            entries=[log_entry.LogEntry(log_name='log_name_value')],
+            log_name="log_name_value",
+            resource=monitored_resource_pb2.MonitoredResource(type="type_value"),
+            labels={"key_value": "value_value"},
+            entries=[log_entry.LogEntry(log_name="log_name_value")],
         )
 
 @pytest.mark.asyncio
@@ -1481,10 +1481,10 @@ async def test_write_log_entries_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.write_log_entries(
-            log_name='log_name_value',
-            resource=monitored_resource_pb2.MonitoredResource(type='type_value'),
-            labels={'key_value': 'value_value'},
-            entries=[log_entry.LogEntry(log_name='log_name_value')],
+            log_name="log_name_value",
+            resource=monitored_resource_pb2.MonitoredResource(type="type_value"),
+            labels={"key_value": "value_value"},
+            entries=[log_entry.LogEntry(log_name="log_name_value")],
         )
 
         # Establish that the underlying call was made with the expected
@@ -1492,16 +1492,16 @@ async def test_write_log_entries_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].log_name
-        mock_val = 'log_name_value'
+        mock_val = "log_name_value"
         assert arg == mock_val
         arg = args[0].resource
-        mock_val = monitored_resource_pb2.MonitoredResource(type='type_value')
+        mock_val = monitored_resource_pb2.MonitoredResource(type="type_value")
         assert arg == mock_val
         arg = args[0].labels
-        mock_val = {'key_value': 'value_value'}
+        mock_val = {"key_value": "value_value"}
         assert arg == mock_val
         arg = args[0].entries
-        mock_val = [log_entry.LogEntry(log_name='log_name_value')]
+        mock_val = [log_entry.LogEntry(log_name="log_name_value")]
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -1515,10 +1515,10 @@ async def test_write_log_entries_flattened_error_async():
     with pytest.raises(ValueError):
         await client.write_log_entries(
             logging.WriteLogEntriesRequest(),
-            log_name='log_name_value',
-            resource=monitored_resource_pb2.MonitoredResource(type='type_value'),
-            labels={'key_value': 'value_value'},
-            entries=[log_entry.LogEntry(log_name='log_name_value')],
+            log_name="log_name_value",
+            resource=monitored_resource_pb2.MonitoredResource(type="type_value"),
+            labels={"key_value": "value_value"},
+            entries=[log_entry.LogEntry(log_name="log_name_value")],
         )
 
 
@@ -1542,7 +1542,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging.ListLogEntriesResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         )
         response = client.list_log_entries(request)
@@ -1554,7 +1554,7 @@ def test_list_log_entries(request_type, transport: str = 'grpc'):
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLogEntriesPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 def test_list_log_entries_non_empty_request_with_auto_populated_field():
@@ -1569,9 +1569,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging.ListLogEntriesRequest(
-        filter='filter_value',
-        order_by='order_by_value',
-        page_token='page_token_value',
+        filter="filter_value",
+        order_by="order_by_value",
+        page_token="page_token_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1583,9 +1583,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging.ListLogEntriesRequest(
-            filter='filter_value',
-            order_by='order_by_value',
-            page_token='page_token_value',
+            filter="filter_value",
+            order_by="order_by_value",
+            page_token="page_token_value",
         )
 
 def test_list_log_entries_use_cached_wrapped_rpc():
@@ -1671,7 +1671,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         response = await client.list_log_entries(request)
@@ -1683,7 +1683,7 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListLogEntriesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 @pytest.mark.asyncio
@@ -1705,9 +1705,9 @@ def test_list_log_entries_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_log_entries(
-            resource_names=['resource_names_value'],
-            filter='filter_value',
-            order_by='order_by_value',
+            resource_names=["resource_names_value"],
+            filter="filter_value",
+            order_by="order_by_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -1715,13 +1715,13 @@ def test_list_log_entries_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].resource_names
-        mock_val = ['resource_names_value']
+        mock_val = ["resource_names_value"]
         assert arg == mock_val
         arg = args[0].filter
-        mock_val = 'filter_value'
+        mock_val = "filter_value"
         assert arg == mock_val
         arg = args[0].order_by
-        mock_val = 'order_by_value'
+        mock_val = "order_by_value"
         assert arg == mock_val
 
 
@@ -1735,9 +1735,9 @@ def test_list_log_entries_flattened_error():
     with pytest.raises(ValueError):
         client.list_log_entries(
             logging.ListLogEntriesRequest(),
-            resource_names=['resource_names_value'],
-            filter='filter_value',
-            order_by='order_by_value',
+            resource_names=["resource_names_value"],
+            filter="filter_value",
+            order_by="order_by_value",
         )
 
 @pytest.mark.asyncio
@@ -1757,9 +1757,9 @@ async def test_list_log_entries_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_log_entries(
-            resource_names=['resource_names_value'],
-            filter='filter_value',
-            order_by='order_by_value',
+            resource_names=["resource_names_value"],
+            filter="filter_value",
+            order_by="order_by_value",
         )
 
         # Establish that the underlying call was made with the expected
@@ -1767,13 +1767,13 @@ async def test_list_log_entries_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].resource_names
-        mock_val = ['resource_names_value']
+        mock_val = ["resource_names_value"]
         assert arg == mock_val
         arg = args[0].filter
-        mock_val = 'filter_value'
+        mock_val = "filter_value"
         assert arg == mock_val
         arg = args[0].order_by
-        mock_val = 'order_by_value'
+        mock_val = "order_by_value"
         assert arg == mock_val
 
 @pytest.mark.asyncio
@@ -1787,9 +1787,9 @@ async def test_list_log_entries_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_log_entries(
             logging.ListLogEntriesRequest(),
-            resource_names=['resource_names_value'],
-            filter='filter_value',
-            order_by='order_by_value',
+            resource_names=["resource_names_value"],
+            filter="filter_value",
+            order_by="order_by_value",
         )
 
 
@@ -2004,7 +2004,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = logging.ListMonitoredResourceDescriptorsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         )
         response = client.list_monitored_resource_descriptors(request)
@@ -2016,7 +2016,7 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager)
-    assert response.next_page_token == 'next_page_token_value'
+    assert response.next_page_token == "next_page_token_value"
 
 
 def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field():
@@ -2031,7 +2031,7 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = logging.ListMonitoredResourceDescriptorsRequest(
-        page_token='page_token_value',
+        page_token="page_token_value",
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2043,7 +2043,7 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == logging.ListMonitoredResourceDescriptorsRequest(
-            page_token='page_token_value',
+            page_token="page_token_value",
         )
 
 def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc():
@@ -2129,7 +2129,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse(
-            next_page_token='next_page_token_value',
+            next_page_token="next_page_token_value",
         ))
         response = await client.list_monitored_resource_descriptors(request)
@@ -2141,7 +2141,7 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_
 
     # Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2360,8 +2360,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -2373,8 +2373,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" def test_list_logs_non_empty_request_with_auto_populated_field(): @@ -2389,8 +2389,8 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2402,8 +2402,8 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_logs_use_cached_wrapped_rpc(): @@ -2489,8 +2489,8 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", )) response = await client.list_logs(request) @@ -2502,8 +2502,8 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' + assert response.log_names == ["log_names_value"] + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -2519,7 +2519,7 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2551,7 +2551,7 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
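The `grpc_asyncio` variants above wrap every designated return value in `grpc_helpers_async.FakeUnaryUnaryCall` because the async client awaits the stub's result: the fake is an already-resolved awaitable that yields the wrapped response. A small sketch of the mechanism, assuming only the `FakeUnaryUnaryCall` helper these tests import from `google.api_core`:

    import asyncio
    from unittest import mock

    from google.api_core import grpc_helpers_async


    async def main():
        call = mock.Mock()
        # The mocked stub returns an awaitable; awaiting it yields the response.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall("fake-response")
        assert await call() == "fake-response"


    asyncio.run(main())
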
with mock.patch.object( @@ -2587,7 +2587,7 @@ def test_list_logs_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2595,7 +2595,7 @@ def test_list_logs_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -2609,7 +2609,7 @@ def test_list_logs_flattened_error(): with pytest.raises(ValueError): client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -2629,7 +2629,7 @@ async def test_list_logs_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_logs( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -2637,7 +2637,7 @@ async def test_list_logs_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -2651,7 +2651,7 @@ async def test_list_logs_flattened_error_async(): with pytest.raises(ValueError): await client.list_logs( logging.ListLogsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3283,7 +3283,7 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_log_entries(request=None) @@ -3310,7 +3310,7 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client.list_monitored_resource_descriptors(request=None) @@ -3337,8 +3337,8 @@ async def test_list_logs_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', + log_names=["log_names_value"], + next_page_token="next_page_token_value", )) await client.list_logs(request=None) diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index d843efe3c1..b2614e1f50 100755 --- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -964,7 +964,7 @@ def test__list_log_metrics(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", ) response = client._list_log_metrics(request) @@ -976,7 +976,7 @@ def test__list_log_metrics(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" def test__list_log_metrics_non_empty_request_with_auto_populated_field(): @@ -991,8 +991,8 @@ def test__list_log_metrics_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1004,8 +1004,8 @@ def test__list_log_metrics_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test__list_log_metrics_use_cached_wrapped_rpc(): @@ -1091,7 +1091,7 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) response = await client._list_log_metrics(request) @@ -1103,7 +1103,7 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio @@ -1119,7 +1119,7 @@ def test__list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1151,7 +1151,7 @@ async def test__list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1187,7 @@ def test__list_log_metrics_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client._list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1195,7 +1195,7 @@ def test__list_log_metrics_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1209,7 +1209,7 @@ def test__list_log_metrics_flattened_error(): with pytest.raises(ValueError): client._list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1229,7 +1229,7 @@ async def test__list_log_metrics_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client._list_log_metrics( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1237,7 +1237,7 @@ async def test__list_log_metrics_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1251,7 +1251,7 @@ async def test__list_log_metrics_flattened_error_async(): with pytest.raises(ValueError): await client._list_log_metrics( logging_metrics.ListLogMetricsRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1471,12 +1471,12 @@ def test__get_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._get_log_metric(request) @@ -1489,12 +1489,12 @@ def test__get_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1510,7 +1510,7 @@ def test__get_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
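The `flattened_error` tests above pin down the mutual-exclusion rule between `request` and the flattened keyword arguments. The guard they exercise appears verbatim in the redis client later in this patch; reduced to a standalone sketch:

    from typing import Optional


    def check_flattened(request: Optional[object], *flattened_params) -> None:
        # Mirrors the generated guard: request and flattened fields are exclusive.
        has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")


    check_flattened(None, "parent_value")  # flattened only: fine
    check_flattened({"parent": "p"})       # request only: fine
    # check_flattened({"parent": "p"}, "parent_value") would raise ValueError
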
@@ -1522,7 +1522,7 @@ def test__get_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test__get_log_metric_use_cached_wrapped_rpc(): @@ -1608,12 +1608,12 @@ async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client._get_log_metric(request) @@ -1626,12 +1626,12 @@ async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1648,7 +1648,7 @@ def test__get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1680,7 +1680,7 @@ async def test__get_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1716,7 +1716,7 @@ def test__get_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1724,7 +1724,7 @@ def test__get_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -1738,7 +1738,7 @@ def test__get_log_metric_flattened_error(): with pytest.raises(ValueError): client._get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @pytest.mark.asyncio @@ -1758,7 +1758,7 @@ async def test__get_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._get_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -1766,7 +1766,7 @@ async def test__get_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -1780,7 +1780,7 @@ async def test__get_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._get_log_metric( logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -1804,12 +1804,12 @@ def test__create_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._create_log_metric(request) @@ -1822,12 +1822,12 @@ def test__create_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1843,7 +1843,7 @@ def test__create_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1855,7 +1855,7 @@ def test__create_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest( - parent='parent_value', + parent="parent_value", ) def test__create_log_metric_use_cached_wrapped_rpc(): @@ -1941,12 +1941,12 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client._create_log_metric(request) @@ -1959,12 +1959,12 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1981,7 +1981,7 @@ def test__create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2013,7 +2013,7 @@ async def test__create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2049,8 +2049,8 @@ def test__create_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2058,10 +2058,10 @@ def test__create_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2075,8 +2075,8 @@ def test__create_log_metric_flattened_error(): with pytest.raises(ValueError): client._create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @pytest.mark.asyncio @@ -2096,8 +2096,8 @@ async def test__create_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
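The `field_headers` tests above set `request.parent` or `request.metric_name` and then verify the routing metadata attached to the outgoing call. That metadata pair is built with `gapic_v1.routing_header.to_grpc_metadata`, the same helper this patch brackets with `# fmt: off` in the generated clients; a sketch with a placeholder value:

    from google.api_core import gapic_v1

    pair = gapic_v1.routing_header.to_grpc_metadata((("parent", "parent_value"),))
    # A single ("x-goog-request-params", "parent=parent_value") metadata pair.
    assert pair == ("x-goog-request-params", "parent=parent_value")
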
response = await client._create_log_metric( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2105,10 +2105,10 @@ async def test__create_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2122,8 +2122,8 @@ async def test__create_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._create_log_metric( logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @@ -2147,12 +2147,12 @@ def test__update_log_metric(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) response = client._update_log_metric(request) @@ -2165,12 +2165,12 @@ def test__update_log_metric(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2186,7 +2186,7 @@ def test__update_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2198,7 +2198,7 @@ def test__update_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test__update_log_metric_use_cached_wrapped_rpc(): @@ -2284,12 +2284,12 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) response = await client._update_log_metric(request) @@ -2302,12 +2302,12 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.bucket_name == 'bucket_name_value' + assert response.name == "name_value" + assert response.description == "description_value" + assert response.filter == "filter_value" + assert response.bucket_name == "bucket_name_value" assert response.disabled is True - assert response.value_extractor == 'value_extractor_value' + assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -2324,7 +2324,7 @@ def test__update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2356,7 +2356,7 @@ async def test__update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2392,8 +2392,8 @@ def test__update_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2401,10 +2401,10 @@ def test__update_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @@ -2418,8 +2418,8 @@ def test__update_log_metric_flattened_error(): with pytest.raises(ValueError): client._update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @pytest.mark.asyncio @@ -2439,8 +2439,8 @@ async def test__update_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._update_log_metric( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2448,10 +2448,10 @@ async def test__update_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val arg = args[0].metric - mock_val = logging_metrics.LogMetric(name='name_value') + mock_val = logging_metrics.LogMetric(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2465,8 +2465,8 @@ async def test__update_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._update_log_metric( logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), + metric_name="metric_name_value", + metric=logging_metrics.LogMetric(name="name_value"), ) @@ -2514,7 +2514,7 @@ def test__delete_log_metric_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2526,7 +2526,7 @@ def test__delete_log_metric_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest( - metric_name='metric_name_value', + metric_name="metric_name_value", ) def test__delete_log_metric_use_cached_wrapped_rpc(): @@ -2637,7 +2637,7 @@ def test__delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2669,7 +2669,7 @@ async def test__delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name_value' + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2705,7 +2705,7 @@ def test__delete_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client._delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2713,7 +2713,7 @@ def test__delete_log_metric_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @@ -2727,7 +2727,7 @@ def test__delete_log_metric_flattened_error(): with pytest.raises(ValueError): client._delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @pytest.mark.asyncio @@ -2747,7 +2747,7 @@ async def test__delete_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client._delete_log_metric( - metric_name='metric_name_value', + metric_name="metric_name_value", ) # Establish that the underlying call was made with the expected @@ -2755,7 +2755,7 @@ async def test__delete_log_metric_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].metric_name - mock_val = 'metric_name_value' + mock_val = "metric_name_value" assert arg == mock_val @pytest.mark.asyncio @@ -2769,7 +2769,7 @@ async def test__delete_log_metric_flattened_error_async(): with pytest.raises(ValueError): await client._delete_log_metric( logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', + metric_name="metric_name_value", ) @@ -3019,7 +3019,7 @@ async def test__list_log_metrics_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', + next_page_token="next_page_token_value", )) await client._list_log_metrics(request=None) @@ -3046,12 +3046,12 @@ async def test__get_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client._get_log_metric(request=None) @@ -3079,12 +3079,12 @@ async def test__create_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client._create_log_metric(request=None) @@ -3112,12 +3112,12 @@ async def test__update_log_metric_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', + name="name_value", + description="description_value", + filter="filter_value", + bucket_name="bucket_name_value", disabled=True, - value_extractor='value_extractor_value', + value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, )) await client._update_log_metric(request=None) diff --git a/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 98170c7665..851a8fae81 100755 --- a/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -49,33 +49,34 @@ from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata -__all__ = ('CloudRedisClient', - 'CloudRedisAsyncClient', - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'ExportInstanceRequest', - 'FailoverInstanceRequest', - 'GcsDestination', - 'GcsSource', - 'GetInstanceAuthStringRequest', - 'GetInstanceRequest', - 'ImportInstanceRequest', - 'InputConfig', - 'Instance', - 'InstanceAuthString', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'LocationMetadata', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 'OperationMetadata', - 'OutputConfig', - 'PersistenceConfig', - 'RescheduleMaintenanceRequest', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'UpgradeInstanceRequest', - 'WeeklyMaintenanceWindow', - 'ZoneMetadata', +__all__ = ( + "CloudRedisClient", + "CloudRedisAsyncClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "GcsDestination", + "GcsSource", + "GetInstanceAuthStringRequest", + "GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index c5182e2f84..bb7fcdaa3d 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -59,10 +59,10 @@ from .types.cloud_redis import WeeklyMaintenanceWindow from .types.cloud_redis import ZoneMetadata -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.redis_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore -else: # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.redis_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore +else: # pragma: NO COVER # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. 
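This fallback branch re-implements the Python- and dependency-version checks by hand when the installed `api_core` predates `check_python_version`/`check_dependency_versions`. Its `parse_version_to_tuple` helper (defined just below) only needs to turn a semver string into something comparable; a hedged sketch of one way such a helper can behave, since the golden's exact body is elided from this hunk:

    def parse_version_to_tuple(version_string: str):
        """Safely convert a semantic version string to a comparable int tuple."""
        try:
            return tuple(int(part) for part in version_string.split(".")[:3])
        except ValueError:
            return (0, 0, 0)  # assumption: unparseable versions compare as oldest


    assert parse_version_to_tuple("2.25.1") >= (2, 0, 0)
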
try: @@ -72,20 +72,24 @@ _py_version_str = sys.version.split()[0] _package_label = "google.cloud.redis_v1" if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) def parse_version_to_tuple(version_string: str): """Safely converts a semantic version string to a comparable tuple of integers. @@ -123,54 +127,58 @@ def _get_version(dependency_name): _recommendation = " (we recommend 6.x)" (_version_used, _version_used_string) = _get_version(_dependency_package) if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. 
To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") + warnings.warn( + "Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) __all__ = ( - 'CloudRedisAsyncClient', -'CloudRedisClient', -'CreateInstanceRequest', -'DeleteInstanceRequest', -'ExportInstanceRequest', -'FailoverInstanceRequest', -'GcsDestination', -'GcsSource', -'GetInstanceAuthStringRequest', -'GetInstanceRequest', -'ImportInstanceRequest', -'InputConfig', -'Instance', -'InstanceAuthString', -'ListInstancesRequest', -'ListInstancesResponse', -'LocationMetadata', -'MaintenancePolicy', -'MaintenanceSchedule', -'NodeInfo', -'OperationMetadata', -'OutputConfig', -'PersistenceConfig', -'RescheduleMaintenanceRequest', -'TlsCertificate', -'UpdateInstanceRequest', -'UpgradeInstanceRequest', -'WeeklyMaintenanceWindow', -'ZoneMetadata', + "CloudRedisAsyncClient", + "CloudRedisClient", + "CreateInstanceRequest", + "DeleteInstanceRequest", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "GcsDestination", + "GcsSource", + "GetInstanceAuthStringRequest", + "GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index f302df64f1..916eb4e24c 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -17,6 +17,6 @@ from .async_client import CloudRedisAsyncClient __all__ = ( - 'CloudRedisClient', - 'CloudRedisAsyncClient', + "CloudRedisClient", + "CloudRedisAsyncClient", ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index ea3e960a8e..cc0674ef36 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -16,7 +16,18 @@ import logging as std_logging from collections import OrderedDict import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) from google.cloud.redis_v1 import gapic_version as package_version @@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry_async as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import 
credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf @@ -36,10 +47,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -49,12 +60,14 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) + class CloudRedisAsyncClient: """Configures and manages Cloud Memorystore for Redis instances @@ -198,12 +211,14 @@ def universe_domain(self) -> str: get_transport_class = CloudRedisClient.get_transport_class - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis async client. 
Args: @@ -261,31 +276,33 @@ def __init__(self, *, transport=transport, client_options=client_options, client_info=client_info, - ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._client._transport, "_credentials") else { + } + if hasattr(self._client._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - async def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesAsyncPager: + async def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -360,8 +377,7 @@ async def sample_list_instances(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -379,11 +395,13 @@ async def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -410,14 +428,15 @@ async def sample_list_instances(): # Done; return the response. return response - async def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + async def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. 
code-block:: python @@ -476,8 +495,7 @@ async def sample_get_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -495,11 +513,13 @@ async def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -515,14 +535,15 @@ async def sample_get_instance(): # Done; return the response. return response - async def get_instance_auth_string(self, - request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.InstanceAuthString: + async def get_instance_auth_string( + self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. This information is not included in the details returned @@ -584,8 +605,7 @@ async def sample_get_instance_auth_string(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -603,11 +623,13 @@ async def sample_get_instance_auth_string(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -623,16 +645,17 @@ async def sample_get_instance_auth_string(): # Done; return the response. 
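Note the new `# fmt: off` / `# fmt: on` pairs around the routing-header construction: Black leaves everything between the markers untouched, so the generator keeps its one-field-per-line tuple layout while the rest of the file is formatted. A tiny illustration with placeholder values:

    request_params = "parent=parent_value"  # placeholder

    # fmt: off
    metadata = (
        ("x-goog-request-params", request_params),
    )
    # fmt: on
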
return response - async def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -740,8 +763,7 @@ async def sample_create_instance(): flattened_params = [parent, instance_id, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -763,11 +785,13 @@ async def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -791,15 +815,16 @@ async def sample_create_instance(): # Done; return the response. return response - async def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific Redis instance. 
Completed longrunning.Operation will contain the new @@ -891,8 +916,7 @@ async def sample_update_instance(): flattened_params = [update_mask, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -912,11 +936,13 @@ async def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("instance.name", request.instance.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -940,15 +966,16 @@ async def sample_update_instance(): # Done; return the response. return response - async def upgrade_instance(self, - request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - redis_version: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def upgrade_instance( + self, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + redis_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -1025,8 +1052,7 @@ async def sample_upgrade_instance(): flattened_params = [name, redis_version] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1046,11 +1072,13 @@ async def sample_upgrade_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1074,15 +1102,16 @@ async def sample_upgrade_instance(): # Done; return the response. 
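The recurring `flattened_params` check in these hunks enforces that callers pass either a prebuilt request object or the flattened convenience fields, never both. The same guard in isolation, with hypothetical names:

    from typing import Optional

    def get_instance(request: Optional[dict] = None, *, name: Optional[str] = None) -> dict:
        # Any non-None flattened field counts as "flattened params were used".
        flattened_params = [name]
        has_flattened_params = len([p for p in flattened_params if p is not None]) > 0
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")
        # Otherwise build the request from the flattened field.
        return request if request is not None else {"name": name}

    print(get_instance(name="projects/p/locations/l/instances/i"))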
return response - async def import_instance(self, - request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[cloud_redis.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def import_instance( + self, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. Redis may stop serving during this operation. Instance @@ -1169,8 +1198,7 @@ async def sample_import_instance(): flattened_params = [name, input_config] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1190,11 +1218,13 @@ async def sample_import_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1218,15 +1248,16 @@ async def sample_import_instance(): # Done; return the response. return response - async def export_instance(self, - request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[cloud_redis.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def export_instance( + self, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. 
@@ -1310,8 +1341,7 @@ async def sample_export_instance(): flattened_params = [name, output_config] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1331,11 +1361,13 @@ async def sample_export_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1359,15 +1391,16 @@ async def sample_export_instance(): # Done; return the response. return response - async def failover_instance(self, - request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def failover_instance( + self, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1445,8 +1478,7 @@ async def sample_failover_instance(): flattened_params = [name, data_protection_mode] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1466,11 +1498,13 @@ async def sample_failover_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1494,14 +1528,15 @@ async def sample_failover_instance(): # Done; return the response. 
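Each of these mutating methods returns a `google.api_core.operation_async.AsyncOperation`, so issuing the RPC and waiting for the long-running result are separate steps. A usage sketch, not from this patch (resource names and bucket are placeholders; assumes `google-cloud-redis` is installed with working credentials):

    import asyncio
    from google.cloud import redis_v1

    async def main() -> None:
        client = redis_v1.CloudRedisAsyncClient()
        operation = await client.export_instance(request={
            "name": "projects/my-project/locations/us-central1/instances/my-instance",
            "output_config": {"gcs_destination": {"uri": "gs://my-bucket/backup.rdb"}},
        })
        # The call returns once the LRO is started; result() polls to completion.
        response = await operation.result()
        print(response)

    asyncio.run(main())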
return response - async def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1577,8 +1612,7 @@ async def sample_delete_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1596,11 +1630,13 @@ async def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1624,16 +1660,17 @@ async def sample_delete_instance(): # Done; return the response. return response - async def reschedule_maintenance(self, - request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, - *, - name: Optional[str] = None, - reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: + async def reschedule_maintenance( + self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: r"""Reschedule maintenance for a given instance in a given project and location. 
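For reference, `gapic_v1.routing_header.to_grpc_metadata`, used in every hunk above, collapses the field pairs into the single `x-goog-request-params` metadata entry that backends use for request routing:

    from google.api_core import gapic_v1

    params = (("name", "projects/my-project/locations/us-central1/instances/my-instance"),)
    print(gapic_v1.routing_header.to_grpc_metadata(params))
    # ('x-goog-request-params', 'name=projects...') -- the value is form-encoded;
    # the exact escaping depends on the google-api-core version.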
@@ -1718,8 +1755,7 @@ async def sample_reschedule_maintenance(): flattened_params = [name, reschedule_type, schedule_time] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1741,11 +1777,13 @@ async def sample_reschedule_maintenance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._client._validate_universe_domain() @@ -1806,17 +1844,18 @@ async def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1858,17 +1897,18 @@ async def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1914,16 +1954,18 @@ async def delete_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. - await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def cancel_operation( self, @@ -1965,16 +2007,18 @@ async def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. 
- await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) async def wait_operation( self, @@ -2019,17 +2063,18 @@ async def wait_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2071,17 +2116,18 @@ async def get_location( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2123,17 +2169,18 @@ async def list_locations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._client._validate_universe_domain() # Send the request. response = await rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -2144,12 +2191,11 @@ async def __aenter__(self) -> "CloudRedisAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisAsyncClient", -) +__all__ = ("CloudRedisAsyncClient",) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index f3da10c2fc..8b1c819840 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.redis_v1 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -50,10 +63,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -61,10 +74,12 @@ from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport + try: from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True -except ImportError as e: # pragma: NO COVER +except ImportError as e: # pragma: NO COVER HAS_ASYNC_REST_DEPENDENCIES = 
False ASYNC_REST_EXCEPTION = e @@ -76,6 +91,7 @@ class CloudRedisClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport @@ -83,9 +99,10 @@ class CloudRedisClientMeta(type): if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudRedisTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudRedisTransport]: """Returns an appropriate transport class. Args: @@ -144,9 +161,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -155,16 +170,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "redis.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -178,21 +192,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -227,7 +239,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): CloudRedisClient: The constructed client.
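The `HAS_ASYNC_REST_DEPENDENCIES` / `ASYNC_REST_EXCEPTION` pair above follows a common optional-dependency pattern: record the import failure at module load, and re-raise it only if the optional transport is actually requested. A generic sketch (the module name is hypothetical):

    HAS_OPTIONAL_BACKEND = True
    OPTIONAL_BACKEND_EXCEPTION = None
    try:
        import optional_backend  # hypothetical extra dependency
    except ImportError as e:
        HAS_OPTIONAL_BACKEND = False
        OPTIONAL_BACKEND_EXCEPTION = e

    def get_backend():
        # Fail at use time, not import time, and preserve the original cause.
        if not HAS_OPTIONAL_BACKEND:
            raise ImportError("the optional backend is not installed") from OPTIONAL_BACKEND_EXCEPTION
        return optional_backend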
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -244,73 +257,118 @@ def transport(self) -> CloudRedisTransport: return self._transport @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path( + path: str, + ) -> Dict[str, str]: """Parses a instance path into its component segments.""" m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, 
location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -342,8 +400,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = CloudRedisClient._use_client_cert_effective()
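The path helpers above pair a `str.format` template with a named-group regex so resource names can be built and parsed symmetrically. A condensed round-trip in the same shape:

    import re
    from typing import Dict

    def instance_path(project: str, location: str, instance: str) -> str:
        # Build a fully-qualified instance resource name.
        return "projects/{project}/locations/{location}/instances/{instance}".format(
            project=project, location=location, instance=instance)

    def parse_instance_path(path: str) -> Dict[str, str]:
        # Invert instance_path; empty dict when the path does not match.
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$", path)
        return m.groupdict() if m else {}

    name = instance_path("my-project", "us-central1", "my-instance")
    assert parse_instance_path(name) == {
        "project": "my-project", "location": "us-central1", "instance": "my-instance"}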
Args: @@ -578,12 +640,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -603,22 +665,22 @@ def __init__(self, *, if transport_provided: # transport is a CloudRedisTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(CloudRedisTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - CloudRedisClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( @@ -634,7 +696,6 @@ def __init__(self, *, "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, - } provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] if provided_unsupported_params: @@ -670,25 +731,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: 
OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -763,8 +827,7 @@ def sample_list_instances(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -781,11 +844,13 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -812,14 +877,15 @@ def sample_list_instances(): # Done; return the response. return response - def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -878,8 +944,7 @@ def sample_get_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -896,11 +961,13 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -916,14 +983,15 @@ def sample_get_instance(): # Done; return the response. 
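The client-creation debug log a few hunks above feeds structured fields through the stdlib `extra` mechanism, guarded by both a capability flag and `isEnabledFor` so the payload dict is only built when DEBUG logging is live. A stdlib-only sketch of the same shape:

    import logging

    _LOGGER = logging.getLogger("example.client")

    def log_client_created(service_name: str) -> None:
        # Build the structured payload only when someone will see it.
        if _LOGGER.isEnabledFor(logging.DEBUG):
            _LOGGER.debug(
                "Created client.",
                extra={"serviceName": service_name, "credentialsType": None},
            )

    logging.basicConfig(level=logging.DEBUG)
    log_client_created("google.cloud.redis.v1.CloudRedis")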
return response - def get_instance_auth_string(self, - request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.InstanceAuthString: + def get_instance_auth_string( + self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. This information is not included in the details returned @@ -985,8 +1053,7 @@ def sample_get_instance_auth_string(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1003,11 +1070,13 @@ def sample_get_instance_auth_string(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1023,16 +1092,17 @@ def sample_get_instance_auth_string(): # Done; return the response. return response - def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1140,8 +1210,7 @@ def sample_create_instance(): flattened_params = [parent, instance_id, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
@@ -1162,11 +1231,13 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1190,15 +1261,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -1290,8 +1362,7 @@ def sample_update_instance(): flattened_params = [update_mask, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1310,11 +1381,13 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("instance.name", request.instance.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1338,15 +1411,16 @@ def sample_update_instance(): # Done; return the response. return response - def upgrade_instance(self, - request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - redis_version: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def upgrade_instance( + self, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + redis_version: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version specified in the request. 
@@ -1423,8 +1497,7 @@ def sample_upgrade_instance(): flattened_params = [name, redis_version] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1443,11 +1516,13 @@ def sample_upgrade_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1471,15 +1546,16 @@ def sample_upgrade_instance(): # Done; return the response. return response - def import_instance(self, - request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - input_config: Optional[cloud_redis.InputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def import_instance( + self, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. Redis may stop serving during this operation. Instance @@ -1566,8 +1642,7 @@ def sample_import_instance(): flattened_params = [name, input_config] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1586,11 +1661,13 @@ def sample_import_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1614,15 +1691,16 @@ def sample_import_instance(): # Done; return the response. 
return response - def export_instance(self, - request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - output_config: Optional[cloud_redis.OutputConfig] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def export_instance( + self, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. @@ -1706,8 +1784,7 @@ def sample_export_instance(): flattened_params = [name, output_config] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1726,11 +1803,13 @@ def sample_export_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1754,15 +1833,16 @@ def sample_export_instance(): # Done; return the response. return response - def failover_instance(self, - request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def failover_instance( + self, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. 
@@ -1840,8 +1920,7 @@ def sample_failover_instance(): flattened_params = [name, data_protection_mode] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1860,11 +1939,13 @@ def sample_failover_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1888,14 +1969,15 @@ def sample_failover_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1971,8 +2053,7 @@ def sample_delete_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1989,11 +2070,13 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2017,16 +2100,17 @@ def sample_delete_instance(): # Done; return the response. 
return response - def reschedule_maintenance(self, - request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, - *, - name: Optional[str] = None, - reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, - schedule_time: Optional[timestamp_pb2.Timestamp] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def reschedule_maintenance( + self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -2111,8 +2195,7 @@ def sample_reschedule_maintenance(): flattened_params = [name, reschedule_type, schedule_time] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2133,11 +2216,13 @@ def sample_reschedule_maintenance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -2211,10 +2296,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -2222,7 +2304,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2267,10 +2353,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -2278,7 +2361,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2327,16 +2414,18 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -2378,16 +2467,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def wait_operation( self, @@ -2432,10 +2523,7 @@ def wait_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -2443,7 +2531,11 @@ def wait_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2488,10 +2580,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -2499,7 +2588,11 @@ def get_location( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2544,10 +2637,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -2555,7 +2645,11 @@ def list_locations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -2569,6 +2663,4 @@ def list_locations( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisClient", -) +__all__ = ("CloudRedisClient",) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 4e0e4cb323..db7bea6d75 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -44,14 +45,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloud_redis.ListInstancesResponse], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., cloud_redis.ListInstancesResponse], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -92,7 +96,7 @@ def __iter__(self) -> Iterator[cloud_redis.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListInstancesAsyncPager: @@ -112,14 +116,17 @@ class ListInstancesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
         Args:
@@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]:
             self._request.page_token = self._response.next_page_token
             self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
             yield self._response
+
     def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]:
         async def async_generator():
             async for page in self.pages:
@@ -163,4 +171,4 @@ async def async_generator():
         return async_generator()
 
     def __repr__(self) -> str:
-        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py
index 1cbbf54c25..5d26ad11b0 100755
--- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py
+++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py
@@ -21,11 +21,13 @@
 from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport
 from .rest import CloudRedisRestTransport
 from .rest import CloudRedisRestInterceptor
+
 ASYNC_REST_CLASSES: Tuple[str, ...]
 try:
     from .rest_asyncio import AsyncCloudRedisRestTransport
     from .rest_asyncio import AsyncCloudRedisRestInterceptor
-    ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor')
+
+    ASYNC_REST_CLASSES = ("AsyncCloudRedisRestTransport", "AsyncCloudRedisRestInterceptor")
     HAS_REST_ASYNC = True
 except ImportError:  # pragma: NO COVER
     ASYNC_REST_CLASSES = ()
@@ -34,16 +36,16 @@
 
 # Compile a registry of transports.
 _transport_registry = OrderedDict()  # type: Dict[str, Type[CloudRedisTransport]]
-_transport_registry['grpc'] = CloudRedisGrpcTransport
-_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport
-_transport_registry['rest'] = CloudRedisRestTransport
+_transport_registry["grpc"] = CloudRedisGrpcTransport
+_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport
+_transport_registry["rest"] = CloudRedisRestTransport
 if HAS_REST_ASYNC:  # pragma: NO COVER
-    _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport
+    _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport
 
 __all__ = (
-    'CloudRedisTransport',
-    'CloudRedisGrpcTransport',
-    'CloudRedisGrpcAsyncIOTransport',
-    'CloudRedisRestTransport',
-    'CloudRedisRestInterceptor',
+    "CloudRedisTransport",
+    "CloudRedisGrpcTransport",
+    "CloudRedisGrpcAsyncIOTransport",
+    "CloudRedisRestTransport",
+    "CloudRedisRestInterceptor",
 ) + ASYNC_REST_CLASSES
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py
index 0eb82cc696..343b7283c0 100755
--- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py
+++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py
@@ -25,12 +25,12 @@
 from google.api_core import retry as retries
 from google.api_core import operations_v1
 from google.auth import credentials as ga_credentials  # type: ignore
-from google.oauth2 import service_account # type: ignore
+from google.oauth2 import service_account  # type: ignore
 import google.protobuf
 
-from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from google.cloud.redis_v1.types import cloud_redis
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 
 
 DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
@@ -41,29 +41,32 @@
 class CloudRedisTransport(abc.ABC):
     """Abstract transport class for CloudRedis."""
 
+    # fmt: off
     AUTH_SCOPES = (
-        'https://www.googleapis.com/auth/cloud-platform',
+        "https://www.googleapis.com/auth/cloud-platform",
     )
+    # fmt: on
 
-    DEFAULT_HOST: str = 'redis.googleapis.com'
+    DEFAULT_HOST: str = "redis.googleapis.com"
 
     def __init__(
-            self, *,
-            host: str = DEFAULT_HOST,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            **kwargs,
-            ) -> None:
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+        **kwargs,
+    ) -> None:
         """Instantiate the transport.
 
         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'redis.googleapis.com').
+                 The hostname to connect to (default: "redis.googleapis.com").
             credentials (Optional[google.auth.credentials.Credentials]): The
                 authorization credentials to attach to requests. These
                 credentials identify the application to the service; if none
@@ -99,10 +102,10 @@ def __init__(
         if credentials_file is not None:
             credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id,
+            )
         elif credentials is None and not self._ignore_credentials:
             credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
             # Don't apply audience if the credentials file passed from user.
@@ -110,15 +113,19 @@
             credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
 
         # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
             credentials = credentials.with_always_use_jwt_access(True)
 
         # Save the credentials.
         self._credentials = credentials
 
         # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
+        if ":" not in host:
+            host += ":443"
         self._host = host
 
     @property
@@ -218,12 +225,12 @@ def _prep_wrapped_messages(self, client_info):
                 default_timeout=None,
                 client_info=client_info,
             ),
-         }
+        }
 
     def close(self):
         """Closes resources associated with the transport.
 
-         .. warning::
+        .. warning::
             Only call this method if the transport is NOT shared with other clients
             - this may cause errors in other clients!
""" @@ -234,104 +241,159 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + # fmt: off @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Union[ - cloud_redis.ListInstancesResponse, - Awaitable[cloud_redis.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], + Union[ + cloud_redis.ListInstancesResponse, + Awaitable[cloud_redis.ListInstancesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Union[ - cloud_redis.Instance, - Awaitable[cloud_redis.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [cloud_redis.GetInstanceRequest], + Union[ + cloud_redis.Instance, + Awaitable[cloud_redis.Instance] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - Union[ - cloud_redis.InstanceAuthString, - Awaitable[cloud_redis.InstanceAuthString] - ]]: + def get_instance_auth_string( + self, + ) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Union[ + cloud_redis.InstanceAuthString, + Awaitable[cloud_redis.InstanceAuthString] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def upgrade_instance( + self, + ) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def import_instance( + self, + ) -> Callable[ + [cloud_redis.ImportInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def export_instance( + self, + ) -> Callable[ + [cloud_redis.ExportInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - Union[ - operations_pb2.Operation, - 
Awaitable[operations_pb2.Operation] - ]]: + def failover_instance( + self, + ) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def reschedule_maintenance( + self, + ) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -345,53 +407,37 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: raise NotImplementedError() @property def wait_operation( self, - ) -> Callable[ - [operations_pb2.WaitOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.WaitOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], Union[locations_pb2.Location, Awaitable[locations_pb2.Location]]]: raise NotImplementedError() @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]]]: raise NotImplementedError() @property @@ -399,6 +445,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'CloudRedisTransport', -) +__all__ = ("CloudRedisTransport",) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index ac164835f5..a6e5f360de 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -22,7 +22,7 
@@ from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -31,13 +31,14 @@ import grpc # type: ignore import proto # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -57,10 +58,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -68,7 +71,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -94,7 +97,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": client_call_details.method, "response": grpc_response, @@ -136,28 +139,31 @@ class CloudRedisGrpcTransport(CloudRedisTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -232,7 +238,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -241,7 +248,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -276,19 +284,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -324,13 +334,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -342,17 +351,15 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -376,18 +383,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -402,18 +409,18 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
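The ``extra =`` to ``extra=`` and ``# fmt: off`` changes above all live inside the generated ``_LoggingClientInterceptor``, which the transport installs by wrapping its channel. Stripped to its essentials, it is a standard ``grpc.UnaryUnaryClientInterceptor`` that logs around ``continuation``; this sketch keeps only that skeleton (the logger name and message text are illustrative):

    import logging

    import grpc

    _LOGGER = logging.getLogger(__name__)

    class _MinimalLoggingInterceptor(grpc.UnaryUnaryClientInterceptor):
        def intercept_unary_unary(self, continuation, client_call_details, request):
            # Metadata values may be bytes; decode them so they log cleanly.
            request_metadata = {
                key: value.decode("utf-8") if isinstance(value, bytes) else value
                for key, value in (client_call_details.metadata or ())
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={"request_metadata": request_metadata},
            )
            response = continuation(client_call_details, request)
            _LOGGER.debug(f"Received response for {client_call_details.method}")
            return response

    # The generated transport wires it in exactly once, mirroring the hunk above:
    #     self._logged_channel = grpc.intercept_channel(self._grpc_channel, interceptor)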
- if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - cloud_redis.InstanceAuthString]: + def get_instance_auth_string( + self, + ) -> Callable[[cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString]: r"""Return a callable for the get instance auth string method over gRPC. Gets the AUTH string for a Redis instance. If AUTH is @@ -431,18 +438,18 @@ def get_instance_auth_string(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + if "get_instance_auth_string" not in self._stubs: + self._stubs["get_instance_auth_string"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString", request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, ) - return self._stubs['get_instance_auth_string'] + return self._stubs["get_instance_auth_string"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -470,18 +477,18 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -501,18 +508,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - operations_pb2.Operation]: + def upgrade_instance( + self, + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the upgrade instance method over gRPC. Upgrades Redis instance to the newer Redis version @@ -528,18 +535,18 @@ def upgrade_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpgradeInstance", request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['upgrade_instance'] + return self._stubs["upgrade_instance"] @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - operations_pb2.Operation]: + def import_instance( + self, + ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the import instance method over gRPC. Import a Redis RDB snapshot file from Cloud Storage @@ -562,18 +569,18 @@ def import_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ImportInstance', + if "import_instance" not in self._stubs: + self._stubs["import_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ImportInstance", request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['import_instance'] + return self._stubs["import_instance"] @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - operations_pb2.Operation]: + def export_instance( + self, + ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the export instance method over gRPC. Export Redis instance data into a Redis RDB format @@ -593,18 +600,18 @@ def export_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ExportInstance', + if "export_instance" not in self._stubs: + self._stubs["export_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ExportInstance", request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_instance'] + return self._stubs["export_instance"] @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - operations_pb2.Operation]: + def failover_instance( + self, + ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the failover instance method over gRPC. Initiates a failover of the primary node to current @@ -621,18 +628,18 @@ def failover_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/FailoverInstance", request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['failover_instance'] + return self._stubs["failover_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -648,18 +655,18 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - operations_pb2.Operation]: + def reschedule_maintenance( + self, + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: r"""Return a callable for the reschedule maintenance method over gRPC. Reschedule maintenance for a given instance in a @@ -675,13 +682,13 @@ def reschedule_maintenance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
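Every stub property in these hunks repeats one lazily-cached pattern: look the method up in ``self._stubs``, register it on the channel once via ``unary_unary``, and serve all later accesses from the cache. A self-contained sketch of just that mechanism (the ``_StubCacheSketch`` class is illustrative; the method path and serializers are the real ones from the diff):

    from typing import Callable, Dict

    import grpc

    from google.cloud.redis_v1.types import cloud_redis
    from google.longrunning import operations_pb2

    class _StubCacheSketch:
        def __init__(self, channel: grpc.Channel) -> None:
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def export_instance(self) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]:
            # Register the RPC on the channel only on first access; afterwards
            # the cached callable is returned, so each method is wired up once.
            if "export_instance" not in self._stubs:
                self._stubs["export_instance"] = self._channel.unary_unary(
                    "/google.cloud.redis.v1.CloudRedis/ExportInstance",
                    request_serializer=cloud_redis.ExportInstanceRequest.serialize,
                    response_deserializer=operations_pb2.Operation.FromString,
                )
            return self._stubs["export_instance"]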
- if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance", request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['reschedule_maintenance'] + return self._stubs["reschedule_maintenance"] def close(self): self._logged_channel.close() @@ -690,8 +697,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -708,8 +714,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -726,8 +731,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -744,8 +748,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -762,8 +765,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -780,8 +782,7 @@ def list_operations( def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -798,8 +799,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. 
- """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -817,6 +817,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'CloudRedisGrpcTransport', -) +__all__ = ("CloudRedisGrpcTransport",) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 46f96b5c5c..37bc427afa 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -25,23 +25,24 @@ from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -61,10 +62,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -98,7 +101,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -145,13 +148,15 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", 
+ credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -182,29 +187,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -279,7 +286,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -288,7 +296,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -348,17 +357,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. 
return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Awaitable[cloud_redis.ListInstancesResponse]]: + def list_instances( + self, + ) -> Callable[[cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse]]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -382,18 +389,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Awaitable[cloud_redis.Instance]]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -408,18 +415,18 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - Awaitable[cloud_redis.InstanceAuthString]]: + def get_instance_auth_string( + self, + ) -> Callable[[cloud_redis.GetInstanceAuthStringRequest], Awaitable[cloud_redis.InstanceAuthString]]: r"""Return a callable for the get instance auth string method over gRPC. Gets the AUTH string for a Redis instance. If AUTH is @@ -437,18 +444,18 @@ def get_instance_auth_string(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + if "get_instance_auth_string" not in self._stubs: + self._stubs["get_instance_auth_string"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString", request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, ) - return self._stubs['get_instance_auth_string'] + return self._stubs["get_instance_auth_string"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -476,18 +483,18 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -507,18 +514,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def upgrade_instance( + self, + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the upgrade instance method over gRPC. Upgrades Redis instance to the newer Redis version @@ -534,18 +541,18 @@ def upgrade_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
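The asyncio transport repeats the identical stub registry; the only substantive difference is in the annotations, since stubs created on a ``grpc.aio`` channel must be awaited. A minimal sketch of the two property types side by side (the alias names are illustrative; the request and response types are the real ones from the diff):

    from typing import Awaitable, Callable

    from google.cloud.redis_v1.types import cloud_redis
    from google.longrunning import operations_pb2

    # Sync transport property type (grpc.Channel): the call returns the message.
    SyncUpgrade = Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]

    # Async transport property type (grpc.aio.Channel): same request, awaited response.
    AsyncUpgrade = Callable[[cloud_redis.UpgradeInstanceRequest], Awaitable[operations_pb2.Operation]]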
- if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + if "upgrade_instance" not in self._stubs: + self._stubs["upgrade_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpgradeInstance", request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['upgrade_instance'] + return self._stubs["upgrade_instance"] @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def import_instance( + self, + ) -> Callable[[cloud_redis.ImportInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the import instance method over gRPC. Import a Redis RDB snapshot file from Cloud Storage @@ -568,18 +575,18 @@ def import_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ImportInstance', + if "import_instance" not in self._stubs: + self._stubs["import_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ImportInstance", request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['import_instance'] + return self._stubs["import_instance"] @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def export_instance( + self, + ) -> Callable[[cloud_redis.ExportInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the export instance method over gRPC. Export Redis instance data into a Redis RDB format @@ -599,18 +606,18 @@ def export_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ExportInstance', + if "export_instance" not in self._stubs: + self._stubs["export_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ExportInstance", request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['export_instance'] + return self._stubs["export_instance"] @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def failover_instance( + self, + ) -> Callable[[cloud_redis.FailoverInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the failover instance method over gRPC. Initiates a failover of the primary node to current @@ -627,18 +634,18 @@ def failover_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + if "failover_instance" not in self._stubs: + self._stubs["failover_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/FailoverInstance", request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['failover_instance'] + return self._stubs["failover_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -654,18 +661,18 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - Awaitable[operations_pb2.Operation]]: + def reschedule_maintenance( + self, + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the reschedule maintenance method over gRPC. Reschedule maintenance for a given instance in a @@ -681,16 +688,16 @@ def reschedule_maintenance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + if "reschedule_maintenance" not in self._stubs: + self._stubs["reschedule_maintenance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance", request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['reschedule_maintenance'] + return self._stubs["reschedule_maintenance"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -800,8 +807,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -818,8 +824,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -836,8 +841,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -854,8 +858,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -872,8 +875,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -890,8 +892,7 @@ def list_operations( def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -908,8 +909,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
         # gRPC handles serialization and deserialization, so we just need
@@ -923,6 +923,4 @@ def get_location(
         return self._stubs["get_location"]
 
 
-__all__ = (
-    'CloudRedisGrpcAsyncIOTransport',
-)
+__all__ = ("CloudRedisGrpcAsyncIOTransport",)
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py
index e9a1c7a484..26cf0f8b03 100755
--- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py
+++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py
@@ -27,7 +27,7 @@
 from google.protobuf import json_format
 from google.api_core import operations_v1
-from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from requests import __version__ as requests_version
 
 import dataclasses
@@ -49,6 +49,7 @@
 try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False
@@ -173,7 +174,12 @@ def post_upgrade_instance(self, response):
     """
 
-    def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+
+    def pre_create_instance(
+        self,
+        request: cloud_redis.CreateInstanceRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
         """Pre-rpc interceptor for create_instance
 
         Override in a subclass to manipulate the request or metadata
@@ -181,7 +187,10 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada
         """
         return request, metadata
 
-    def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+    def post_create_instance(
+        self,
+        response: operations_pb2.Operation,
+    ) -> operations_pb2.Operation:
         """Post-rpc interceptor for create_instance
 
         DEPRECATED.
Please use the `post_create_instance_with_metadata` @@ -194,7 +203,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -209,7 +222,11 @@ def post_create_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -217,7 +234,10 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. 
Please use the `post_delete_instance_with_metadata` @@ -230,7 +250,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -245,7 +269,11 @@ def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_export_instance( + self, + request: cloud_redis.ExportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -253,7 +281,10 @@ def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metada """ return request, metadata - def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_export_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance DEPRECATED. 
Please use the `post_export_instance_with_metadata` @@ -266,7 +297,11 @@ def post_export_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_export_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_instance Override in a subclass to read or manipulate the response or metadata after it @@ -281,7 +316,11 @@ def post_export_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_failover_instance( + self, + request: cloud_redis.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -289,7 +328,10 @@ def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, me """ return request, metadata - def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_failover_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance DEPRECATED. 
Please use the `post_failover_instance_with_metadata` @@ -302,7 +344,11 @@ def post_failover_instance(self, response: operations_pb2.Operation) -> operatio """ return response - def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for failover_instance Override in a subclass to read or manipulate the response or metadata after it @@ -317,7 +363,11 @@ def post_failover_instance_with_metadata(self, response: operations_pb2.Operatio """ return response, metadata - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -325,7 +375,10 @@ def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se """ return request, metadata - def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + def post_get_instance( + self, + response: cloud_redis.Instance, + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. Please use the `post_get_instance_with_metadata` @@ -338,7 +391,11 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta """ return response - def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -353,7 +410,11 @@ def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metada """ return response, metadata - def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance_auth_string( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -361,7 +422,10 @@ def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStrin """ return request, metadata - def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + def post_get_instance_auth_string( + self, + response: cloud_redis.InstanceAuthString, + ) -> 
cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string DEPRECATED. Please use the `post_get_instance_auth_string_with_metadata` @@ -374,7 +438,11 @@ def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString """ return response - def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_auth_string_with_metadata( + self, + response: cloud_redis.InstanceAuthString, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance_auth_string Override in a subclass to read or manipulate the response or metadata after it @@ -389,7 +457,11 @@ def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.Inst """ return response, metadata - def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_import_instance( + self, + request: cloud_redis.ImportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -397,7 +469,10 @@ def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metada """ return request, metadata - def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_import_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance DEPRECATED. 
Please use the `post_import_instance_with_metadata` @@ -410,7 +485,11 @@ def post_import_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_import_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for import_instance Override in a subclass to read or manipulate the response or metadata after it @@ -425,7 +504,11 @@ def post_import_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -433,7 +516,10 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + def post_list_instances( + self, + response: cloud_redis.ListInstancesResponse, + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -446,7 +532,11 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl """ return response - def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -461,7 +551,11 @@ def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesR """ return response, metadata - def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_reschedule_maintenance( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -469,7 +563,10 @@ def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceR """ return request, metadata - def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_reschedule_maintenance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance DEPRECATED. 
Please use the `post_reschedule_maintenance_with_metadata` @@ -482,7 +579,11 @@ def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> ope """ return response - def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for reschedule_maintenance Override in a subclass to read or manipulate the response or metadata after it @@ -497,7 +598,11 @@ def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Ope """ return response, metadata - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -505,7 +610,10 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -518,7 +626,11 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -533,7 +645,11 @@ def post_update_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_upgrade_instance( + self, + request: cloud_redis.UpgradeInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -541,7 +657,10 @@ def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, meta """ return request, metadata - def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_upgrade_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance DEPRECATED. 
Please use the `post_upgrade_instance_with_metadata` @@ -554,7 +673,11 @@ def post_upgrade_instance(self, response: operations_pb2.Operation) -> operation """ return response - def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_upgrade_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for upgrade_instance Override in a subclass to read or manipulate the response or metadata after it @@ -570,7 +693,9 @@ def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location @@ -580,7 +705,8 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.Location + self, + response: locations_pb2.Location, ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -591,7 +717,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations @@ -601,7 +729,8 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsResponse + self, + response: locations_pb2.ListLocationsResponse, ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -612,7 +741,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation @@ -622,7 +753,8 @@ def pre_cancel_operation( return request, metadata def post_cancel_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for cancel_operation @@ -633,7 +765,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation @@ -643,7 +777,8 @@ def pre_delete_operation( return request, metadata def post_delete_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for delete_operation @@ -654,7 +789,9 @@ def post_delete_operation( return response def pre_get_operation( - 
self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation @@ -664,7 +801,8 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -675,7 +813,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations @@ -685,7 +825,8 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsResponse + self, + response: operations_pb2.ListOperationsResponse, ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -696,7 +837,9 @@ def post_list_operations( return response def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation @@ -706,7 +849,8 @@ def pre_wait_operation( return request, metadata def post_wait_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for wait_operation @@ -756,29 +900,30 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudRedisRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! 
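
The pre/post hooks reformatted above are extension points; below is a sketch of a custom interceptor wired into the transport. The hook names and import path come from this file, while the body of each override and the use of application default credentials are assumptions:

import logging

from google.cloud.redis_v1.services.cloud_redis.transports.rest import (
    CloudRedisRestInterceptor,
    CloudRedisRestTransport,
)

class LoggingInterceptor(CloudRedisRestInterceptor):
    def pre_create_instance(self, request, metadata):
        # Inspect or rewrite the request/metadata before the HTTP call.
        logging.debug("creating instance %s", request.instance_id)
        return request, metadata

    def post_create_instance_with_metadata(self, response, metadata):
        # Preferred over the deprecated post_create_instance hook, since
        # the response metadata is available here as well.
        return response, metadata

# Assumes application default credentials are available.
transport = CloudRedisRestTransport(interceptor=LoggingInterceptor())
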
+ NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -817,10 +962,9 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -837,46 +981,47 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -895,27 +1040,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = 
"application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -949,21 +1096,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -972,7 +1119,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -992,13 +1147,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -1019,26 +1174,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. Args: @@ -1070,21 +1227,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -1093,7 +1250,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
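
Each __call__ guards its request/response logging twice: on the import-time CLIENT_LOGGING_SUPPORTED flag and on the logger's effective level, so no payload is serialized unless DEBUG logging is actually enabled. The same guard in isolation (the log_request wrapper is illustrative):

import logging

try:
    from google.api_core import client_logging  # type: ignore
    CLIENT_LOGGING_SUPPORTED = True
except ImportError:
    CLIENT_LOGGING_SUPPORTED = False

_LOGGER = logging.getLogger(__name__)

def log_request(method, url, metadata, payload):
    # Payload serialization is skipped entirely unless both guards pass.
    if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):
        _LOGGER.debug(
            "Sending request",
            extra={
                "requestMethod": method,
                "requestUrl": url,
                "headers": dict(metadata),
                "payload": payload,
            },
        )
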
@@ -1113,13 +1277,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -1140,27 +1304,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.ExportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. Args: @@ -1194,21 +1360,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "httpRequest": http_request, @@ -1217,7 +1383,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._ExportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
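
The "# In case of error" comment above refers to mapping failed HTTP responses onto typed api_core exceptions. A sketch of that step; from_http_response is the google.api_core helper I believe is used here, but the exact call is elided from this diff:

from google.api_core import exceptions as core_exceptions

def raise_on_error(response):
    # Turn a 4xx/5xx requests.Response into the matching
    # GoogleAPICallError subclass (NotFound, PermissionDenied, ...).
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)
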
@@ -1237,13 +1411,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.export_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "metadata": http_response["headers"], @@ -1264,27 +1438,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. Args: @@ -1318,21 +1494,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "httpRequest": http_request, @@ -1341,7 +1517,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._FailoverInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
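
Every _get_response helper funnels query params through api_core's flattener before handing them to requests. A usage sketch; the parameter values are invented, and the exact value normalization (e.g. of bools or nested messages) depends on api_core's implementation:

from google.api_core import rest_helpers

# Nested query params become dotted key/value pairs; strict=True makes
# the flattener reject leaf values it cannot represent rather than
# silently coercing them.
params = rest_helpers.flatten_query_params(
    {"pageSize": 10, "filter": {"state": "READY"}},
    strict=True,
)
# Roughly: [("pageSize", 10), ("filter.state", "READY")]
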
@@ -1361,13 +1545,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.failover_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "metadata": http_response["headers"], @@ -1388,26 +1572,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -1436,21 +1622,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -1459,7 +1645,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1481,13 +1674,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1508,26 +1701,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.GetInstanceAuthStringRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.InstanceAuthString: + def __call__( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. Args: @@ -1556,21 +1751,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "httpRequest": http_request, @@ -1579,7 +1774,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstanceAuthString._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1601,13 +1803,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "metadata": http_response["headers"], @@ -1628,27 +1830,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.ImportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. Args: @@ -1682,21 +1886,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "httpRequest": http_request, @@ -1705,7 +1909,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._ImportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1725,13 +1937,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.import_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "metadata": http_response["headers"], @@ -1752,26 +1964,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1802,21 +2016,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1825,7 +2039,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
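
Note the two payload serializers in these logging hunks: RPCs with proto-plus request types (GetInstance, ListInstances, ...) use type(request).to_json, while plain protobuf Operation payloads go through json_format.MessageToJson. A side-by-side sketch; the request value is illustrative:

from google.protobuf import json_format
from google.cloud.redis_v1.types import cloud_redis

request = cloud_redis.ListInstancesRequest(parent="projects/p/locations/l")

# proto-plus wrapper type: serialize via the classmethod on the wrapper.
payload = type(request).to_json(request)

# Raw protobuf message: unwrap to the underlying pb2 message first.
payload_pb = json_format.MessageToJson(cloud_redis.ListInstancesRequest.pb(request))
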
@@ -1847,13 +2068,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1874,27 +2095,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.RescheduleMaintenanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. Args: @@ -1928,21 +2151,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "httpRequest": http_request, @@ -1951,7 +2174,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._RescheduleMaintenance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
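Each reformatted `__call__` keeps the same keyword-only calling convention (`retry`, `timeout`, `metadata`). A hypothetical invocation through the matching transport property; `transport` and the resource name below are assumptions for illustration only:

    from google.api_core import retry as retries
    from google.cloud.redis_v1.types import cloud_redis

    # "transport" is assumed to be an already-constructed CloudRedisRestTransport.
    request = cloud_redis.RescheduleMaintenanceRequest(
        name="projects/my-project/locations/us-central1/instances/my-instance",
    )
    operation = transport.reschedule_maintenance(
        request,
        retry=retries.Retry(initial=0.1, maximum=5.0),
        timeout=30.0,
        metadata=(("x-goog-request-params", "name=projects/my-project/locations/us-central1/instances/my-instance"),),
    )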
@@ -1971,13 +2202,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "metadata": http_response["headers"], @@ -1998,27 +2229,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. Args: @@ -2052,21 +2285,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -2075,7 +2308,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
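The context elided after the "raise the appropriate core_exceptions.GoogleAPICallError" comment presumably performs the usual api-core error mapping; a minimal self-contained sketch of that step (the helper name is hypothetical):

    from google.api_core import exceptions as core_exceptions

    def _raise_for_status(response):
        # Convert a 4xx/5xx requests.Response into the matching
        # GoogleAPICallError subclass (e.g. NotFound for a 404).
        if response.status_code >= 400:
            raise core_exceptions.from_http_response(response)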
@@ -2095,13 +2336,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -2122,27 +2363,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpgradeInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. Args: @@ -2176,21 +2419,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "httpRequest": http_request, @@ -2199,7 +2442,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpgradeInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
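All of these stubs read `uri`, `method`, and (where applicable) `body` off a transcoded request derived from the RPC's HTTP rule. A sketch of that shape, with the rule below assumed purely for illustration:

    from google.api_core import path_template

    http_options = [{
        "method": "post",
        "uri": "/v1/{name=projects/*/locations/*/instances/*}:upgrade",
        "body": "*",
    }]
    transcoded = path_template.transcode(
        http_options,
        name="projects/my-project/locations/us-central1/instances/my-instance",
    )
    # transcoded["uri"], transcoded["method"], transcoded["body"], and
    # transcoded["query_params"] are the fields the stubs consume.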
@@ -2219,13 +2470,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.upgrade_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "metadata": http_response["headers"], @@ -2234,97 +2485,119 @@ def __call__(self, ) return resp + # fmt: off @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def export_instance(self) -> Callable[ - [cloud_redis.ExportInstanceRequest], - operations_pb2.Operation]: + def export_instance( + self + ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def failover_instance(self) -> Callable[ - [cloud_redis.FailoverInstanceRequest], - operations_pb2.Operation]: + def failover_instance( + self + ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_instance_auth_string(self) -> Callable[ - [cloud_redis.GetInstanceAuthStringRequest], - cloud_redis.InstanceAuthString]: + def get_instance_auth_string( + self + ) -> Callable[[cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def import_instance(self) -> Callable[ - [cloud_redis.ImportInstanceRequest], - operations_pb2.Operation]: + def import_instance( + self + ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self + ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def reschedule_maintenance(self) -> Callable[ - [cloud_redis.RescheduleMaintenanceRequest], - operations_pb2.Operation]: + def reschedule_maintenance( + self + ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def upgrade_instance(self) -> Callable[ - [cloud_redis.UpgradeInstanceRequest], - operations_pb2.Operation]: + def upgrade_instance( + self + ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): def __hash__(self): @@ -2338,27 +2611,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. 
Args: @@ -2385,21 +2659,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -2408,7 +2682,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2426,12 +2707,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -2442,7 +2723,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): def __hash__(self): @@ -2456,27 +2737,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. 
Args: @@ -2503,21 +2785,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -2526,7 +2808,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2544,12 +2833,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -2560,7 +2849,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): def __hash__(self): @@ -2574,27 +2863,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. 
Args: @@ -2618,21 +2908,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -2641,7 +2931,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2652,7 +2949,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): def __hash__(self): @@ -2666,27 +2963,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. 
Args: @@ -2710,21 +3008,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -2733,7 +3031,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2744,7 +3049,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): def __hash__(self): @@ -2758,27 +3063,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. 
Args: @@ -2805,21 +3111,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -2828,7 +3134,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2846,12 +3159,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -2862,7 +3175,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): def __hash__(self): @@ -2876,27 +3189,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. 
Args: @@ -2923,21 +3237,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, @@ -2946,7 +3260,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2964,12 +3285,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -2980,7 +3301,7 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): def __hash__(self): @@ -2994,28 +3315,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. 
Args: @@ -3044,21 +3366,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -3067,7 +3389,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -3085,12 +3415,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, @@ -3107,6 +3437,4 @@ def close(self): self._session.close() -__all__=( - 'CloudRedisRestTransport', -) +__all__ = ("CloudRedisRestTransport",) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 832be441c1..004e6c2a7f 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -15,20 +15,23 @@ # import google.auth + try: - import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore - from google.api_core import rest_streaming_async # type: ignore - from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore except ImportError as e: # pragma: NO COVER - raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + raise ImportError( + "`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`" + ) from e from google.auth.aio import credentials as ga_credentials_async # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.api_core import retry_async as retries from google.api_core import rest_helpers from google.api_core import rest_streaming_async # type: ignore @@ -36,7 +39,7 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore import json # type: ignore import dataclasses @@ -56,6 +59,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -185,7 +189,12 @@ async def post_upgrade_instance(self, response): """ - async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + async def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -193,7 +202,10 @@ async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, """ return request, metadata - async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_create_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. 
Please use the `post_create_instance_with_metadata` @@ -206,7 +218,11 @@ async def post_create_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -221,7 +237,11 @@ async def post_create_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -229,7 +249,10 @@ async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, """ return request, metadata - async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_delete_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. 
Please use the `post_delete_instance_with_metadata` @@ -242,7 +265,11 @@ async def post_delete_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -257,7 +284,11 @@ async def post_delete_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_export_instance( + self, + request: cloud_redis.ExportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -265,7 +296,10 @@ async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, """ return request, metadata - async def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_export_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance DEPRECATED. 
Please use the `post_export_instance_with_metadata` @@ -278,7 +312,11 @@ async def post_export_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_export_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for export_instance Override in a subclass to read or manipulate the response or metadata after it @@ -293,7 +331,11 @@ async def post_export_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_failover_instance( + self, + request: cloud_redis.FailoverInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -301,7 +343,10 @@ async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceReque """ return request, metadata - async def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_failover_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance DEPRECATED. 
Please use the `post_failover_instance_with_metadata` @@ -314,7 +359,11 @@ async def post_failover_instance(self, response: operations_pb2.Operation) -> op """ return response - async def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_failover_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for failover_instance Override in a subclass to read or manipulate the response or metadata after it @@ -329,7 +378,11 @@ async def post_failover_instance_with_metadata(self, response: operations_pb2.Op """ return response, metadata - async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -337,7 +390,10 @@ async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metada """ return request, metadata - async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + async def post_get_instance( + self, + response: cloud_redis.Instance, + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. 
Please use the `post_get_instance_with_metadata` @@ -350,7 +406,11 @@ async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis """ return response - async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -365,7 +425,11 @@ async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, """ return response, metadata - async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance_auth_string( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -373,7 +437,10 @@ async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAut """ return request, metadata - async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + async def post_get_instance_auth_string( + self, + response: cloud_redis.InstanceAuthString, + ) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string DEPRECATED. 
Please use the `post_get_instance_auth_string_with_metadata` @@ -386,7 +453,11 @@ async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuth """ return response - async def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_auth_string_with_metadata( + self, + response: cloud_redis.InstanceAuthString, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance_auth_string Override in a subclass to read or manipulate the response or metadata after it @@ -401,7 +472,11 @@ async def post_get_instance_auth_string_with_metadata(self, response: cloud_redi """ return response, metadata - async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_import_instance( + self, + request: cloud_redis.ImportInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -409,7 +484,10 @@ async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, """ return request, metadata - async def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_import_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance DEPRECATED. 
Please use the `post_import_instance_with_metadata` @@ -422,7 +500,11 @@ async def post_import_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_import_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for import_instance Override in a subclass to read or manipulate the response or metadata after it @@ -437,7 +519,11 @@ async def post_import_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -445,7 +531,10 @@ async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, me """ return request, metadata - async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + async def post_list_instances( + self, + response: cloud_redis.ListInstancesResponse, + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -458,7 +547,11 @@ async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) """ return response - async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -473,7 +566,11 @@ async def post_list_instances_with_metadata(self, response: cloud_redis.ListInst """ return response, metadata - async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_reschedule_maintenance( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -481,7 +578,10 @@ async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMainte """ return request, metadata - async def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_reschedule_maintenance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance DEPRECATED. 
Please use the `post_reschedule_maintenance_with_metadata` @@ -494,7 +594,11 @@ async def post_reschedule_maintenance(self, response: operations_pb2.Operation) """ return response - async def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_reschedule_maintenance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for reschedule_maintenance Override in a subclass to read or manipulate the response or metadata after it @@ -509,7 +613,11 @@ async def post_reschedule_maintenance_with_metadata(self, response: operations_p """ return response, metadata - async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -517,7 +625,10 @@ async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, """ return request, metadata - async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_update_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -530,7 +641,11 @@ async def post_update_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -545,7 +660,11 @@ async def post_update_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_upgrade_instance( + self, + request: cloud_redis.UpgradeInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -553,7 +672,10 @@ async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest """ return request, metadata - async def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_upgrade_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance DEPRECATED. 
Please use the `post_upgrade_instance_with_metadata` @@ -566,7 +688,11 @@ async def post_upgrade_instance(self, response: operations_pb2.Operation) -> ope """ return response - async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_upgrade_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for upgrade_instance Override in a subclass to read or manipulate the response or metadata after it @@ -582,7 +708,9 @@ async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Ope return response, metadata async def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location @@ -592,7 +720,8 @@ async def pre_get_location( return request, metadata async def post_get_location( - self, response: locations_pb2.Location + self, + response: locations_pb2.Location, ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -603,7 +732,9 @@ async def post_get_location( return response async def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations @@ -613,7 +744,8 @@ async def pre_list_locations( return request, metadata async def post_list_locations( - self, response: locations_pb2.ListLocationsResponse + self, + response: locations_pb2.ListLocationsResponse, ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -624,7 +756,9 @@ async def post_list_locations( return response async def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation @@ -634,7 +768,8 @@ async def pre_cancel_operation( return request, metadata async def post_cancel_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for cancel_operation @@ -645,7 +780,9 @@ async def post_cancel_operation( return response async def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation @@ -655,7 +792,8 @@ async def pre_delete_operation( return request, metadata async def post_delete_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for 
delete_operation @@ -666,7 +804,9 @@ async def post_delete_operation( return response async def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation @@ -676,7 +816,8 @@ async def pre_get_operation( return request, metadata async def post_get_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -687,7 +828,9 @@ async def post_get_operation( return response async def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations @@ -697,7 +840,8 @@ async def pre_list_operations( return request, metadata async def post_list_operations( - self, response: operations_pb2.ListOperationsResponse + self, + response: operations_pb2.ListOperationsResponse, ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -708,7 +852,9 @@ async def post_list_operations( return response async def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation @@ -718,7 +864,8 @@ async def pre_wait_operation( return request, metadata async def post_wait_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for wait_operation @@ -735,6 +882,7 @@ class AsyncCloudRedisRestStub: _host: str _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. @@ -766,23 +914,25 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, - *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials_async.Credentials] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - url_scheme: str = 'https', - interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, - ) -> None: + + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = "https", + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, + ) -> None: """Instantiate the transport. - NOTE: This async REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! + NOTE: This async REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -804,7 +954,7 @@ def __init__(self, client_info=client_info, always_use_jwt_access=False, url_scheme=url_scheme, - api_audience=None + api_audience=None, ) self._session = AsyncAuthorizedSession(self._credentials) # type: ignore self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() @@ -813,7 +963,7 @@ def __init__(self, self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -924,27 +1074,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -978,21 +1130,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -1001,15 +1153,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1027,12 +1187,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -1054,26 +1214,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: 
cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. Args: @@ -1105,21 +1267,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -1128,15 +1290,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1154,12 +1323,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -1181,27 +1350,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.ExportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. 
Args: @@ -1235,21 +1406,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "httpRequest": http_request, @@ -1258,15 +1429,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._ExportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1284,12 +1463,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.export_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", "metadata": http_response["headers"], @@ -1311,27 +1490,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: 
cloud_redis.FailoverInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. Args: @@ -1365,21 +1546,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "httpRequest": http_request, @@ -1388,15 +1569,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1414,12 +1603,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.failover_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", "metadata": http_response["headers"], @@ -1441,26 +1630,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + async def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. 
Args: @@ -1489,21 +1680,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -1512,15 +1703,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1538,12 +1736,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1565,26 +1763,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceAuthStringRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.InstanceAuthString: + async def __call__( + self, + request: cloud_redis.GetInstanceAuthStringRequest, + 
*, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. Args: @@ -1613,21 +1813,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "httpRequest": http_request, @@ -1636,15 +1836,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1662,12 +1869,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", "metadata": http_response["headers"], @@ -1689,27 +1896,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.ImportInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. 
Args: @@ -1743,21 +1952,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "httpRequest": http_request, @@ -1766,15 +1975,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._ImportInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1792,12 +2009,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.import_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", "metadata": http_response["headers"], @@ -1819,26 +2036,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + async def __call__( + self, + request: 
cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1869,21 +2088,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1892,15 +2111,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1918,12 +2144,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1945,27 +2171,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.RescheduleMaintenanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.RescheduleMaintenanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. 
Args: @@ -1999,21 +2227,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "httpRequest": http_request, @@ -2022,15 +2250,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -2048,12 +2284,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", "metadata": http_response["headers"], @@ -2075,27 +2311,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: 
+ async def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. Args: @@ -2129,21 +2367,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -2152,15 +2390,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -2178,12 +2424,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -2205,27 +2451,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.UpgradeInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. 
Args: @@ -2259,21 +2507,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "httpRequest": http_request, @@ -2282,15 +2530,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -2308,12 +2564,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.upgrade_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", "metadata": http_response["headers"], @@ -2333,45 +2589,45 @@ def operations_client(self) -> AsyncOperationsRestClient: # Only create a new client if we do not already have one. 
         if self._operations_client is None:
             http_options: Dict[str, List[Dict[str, str]]] = {
-                'google.longrunning.Operations.CancelOperation': [
+                "google.longrunning.Operations.CancelOperation": [
                     {
-                        'method': 'post',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
+                        "method": "post",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
                     },
                 ],
-                'google.longrunning.Operations.DeleteOperation': [
+                "google.longrunning.Operations.DeleteOperation": [
                     {
-                        'method': 'delete',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+                        "method": "delete",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                     },
                 ],
-                'google.longrunning.Operations.GetOperation': [
+                "google.longrunning.Operations.GetOperation": [
                     {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*/operations/*}',
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*/operations/*}",
                     },
                 ],
-                'google.longrunning.Operations.ListOperations': [
+                "google.longrunning.Operations.ListOperations": [
                     {
-                        'method': 'get',
-                        'uri': '/v1/{name=projects/*/locations/*}/operations',
+                        "method": "get",
+                        "uri": "/v1/{name=projects/*/locations/*}/operations",
                     },
                 ],
-                'google.longrunning.Operations.WaitOperation': [
+                "google.longrunning.Operations.WaitOperation": [
                     {
-                        'method': 'post',
-                        'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait',
-                        'body': '*',
+                        "method": "post",
+                        "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait",
+                        "body": "*",
                     },
                 ],
             }

             rest_transport = operations_v1.AsyncOperationsRestTransport(  # type: ignore
-                    host=self._host,
-                    # use the credentials which are saved
-                    credentials=self._credentials, # type: ignore
-                    http_options=http_options,
-                    path_prefix="v1"
+                host=self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,  # type: ignore
+                http_options=http_options,
+                path_prefix="v1",
             )

             self._operations_client = AsyncOperationsRestClient(transport=rest_transport)
@@ -2380,74 +2636,74 @@ def operations_client(self) -> AsyncOperationsRestClient:
         return self._operations_client

     @property
-    def create_instance(self) -> Callable[
-            [cloud_redis.CreateInstanceRequest],
-            operations_pb2.Operation]:
+    def create_instance(
+        self,
+    ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]:
         return self._CreateInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def delete_instance(self) -> Callable[
-            [cloud_redis.DeleteInstanceRequest],
-            operations_pb2.Operation]:
+    def delete_instance(
+        self,
+    ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]:
         return self._DeleteInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def export_instance(self) -> Callable[
-            [cloud_redis.ExportInstanceRequest],
-            operations_pb2.Operation]:
+    def export_instance(
+        self,
+    ) -> Callable[[cloud_redis.ExportInstanceRequest], operations_pb2.Operation]:
         return self._ExportInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def failover_instance(self) -> Callable[
-            [cloud_redis.FailoverInstanceRequest],
-            operations_pb2.Operation]:
+    def failover_instance(
+        self,
+    ) -> Callable[[cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]:
         return self._FailoverInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def get_instance(self) -> Callable[
-            [cloud_redis.GetInstanceRequest],
-            cloud_redis.Instance]:
+    def get_instance(
+        self,
+    ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]:
         return self._GetInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def get_instance_auth_string(self) -> Callable[
-            [cloud_redis.GetInstanceAuthStringRequest],
-            cloud_redis.InstanceAuthString]:
+    def get_instance_auth_string(
+        self,
+    ) -> Callable[[cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString]:
         return self._GetInstanceAuthString(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def import_instance(self) -> Callable[
-            [cloud_redis.ImportInstanceRequest],
-            operations_pb2.Operation]:
+    def import_instance(
+        self,
+    ) -> Callable[[cloud_redis.ImportInstanceRequest], operations_pb2.Operation]:
         return self._ImportInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def list_instances(self) -> Callable[
-            [cloud_redis.ListInstancesRequest],
-            cloud_redis.ListInstancesResponse]:
+    def list_instances(
+        self,
+    ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]:
         return self._ListInstances(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def reschedule_maintenance(self) -> Callable[
-            [cloud_redis.RescheduleMaintenanceRequest],
-            operations_pb2.Operation]:
+    def reschedule_maintenance(
+        self,
+    ) -> Callable[[cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]:
         return self._RescheduleMaintenance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def update_instance(self) -> Callable[
-            [cloud_redis.UpdateInstanceRequest],
-            operations_pb2.Operation]:
+    def update_instance(
+        self,
+    ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]:
         return self._UpdateInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
-    def upgrade_instance(self) -> Callable[
-            [cloud_redis.UpgradeInstanceRequest],
-            operations_pb2.Operation]:
+    def upgrade_instance(
+        self,
+    ) -> Callable[[cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]:
         return self._UpgradeInstance(self._session, self._host, self._interceptor)  # type: ignore

     @property
     def get_location(self):
-        return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore
+        return self._GetLocation(self._session, self._host, self._interceptor)  # type: ignore

     class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub):
         def __hash__(self):
@@ -2461,27 +2717,28 @@ async def _get_response(
             session,
             timeout,
             transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = await getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response

-        async def __call__(self,
-                request: locations_pb2.GetLocationRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> locations_pb2.Location:
-
+        async def __call__(
+            self,
+            request: locations_pb2.GetLocationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> locations_pb2.Location:
r"""Call the get location method over HTTP. Args: @@ -2508,21 +2765,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -2531,15 +2788,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -2553,12 +2817,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -2569,7 +2833,7 @@ async def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): def __hash__(self): @@ -2583,27 +2847,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + 
) return response - async def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + async def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -2630,21 +2895,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -2653,15 +2918,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -2675,12 +2947,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -2691,7 +2963,7 @@ async def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2705,27 +2977,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. 
                Args:
@@ -2749,21 +3022,21 @@ async def __call__(self,
             query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request)

             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "CancelOperation",
                         "httpRequest": http_request,
@@ -2772,22 +3045,29 @@ async def __call__(self,
             )

             # Send the request
-            response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )

             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
             if response.status_code >= 400:
                 content = await response.read()
-                payload = json.loads(content.decode('utf-8'))
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                payload = json.loads(content.decode("utf-8"))
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore

             return await self._interceptor.post_cancel_operation(None)

     @property
     def delete_operation(self):
-        return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore
+        return self._DeleteOperation(self._session, self._host, self._interceptor)  # type: ignore

     class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub):
         def __hash__(self):
@@ -2801,27 +3081,28 @@ async def _get_response(
             session,
             timeout,
             transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = await getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response

-        async def __call__(self,
-                request: operations_pb2.DeleteOperationRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> None:
-
+        async def __call__(
+            self,
+            request: operations_pb2.DeleteOperationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> None:
            r"""Call the delete operation method over HTTP.

            Args:
@@ -2845,21 +3126,21 @@ async def __call__(self,
             query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request)

             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "DeleteOperation",
                         "httpRequest": http_request,
@@ -2868,22 +3149,29 @@ async def __call__(self,
             )

             # Send the request
-            response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )

             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
             if response.status_code >= 400:
                 content = await response.read()
-                payload = json.loads(content.decode('utf-8'))
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                payload = json.loads(content.decode("utf-8"))
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore

             return await self._interceptor.post_delete_operation(None)

     @property
     def get_operation(self):
-        return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore
+        return self._GetOperation(self._session, self._host, self._interceptor)  # type: ignore

     class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub):
         def __hash__(self):
@@ -2897,27 +3185,28 @@ async def _get_response(
             session,
             timeout,
             transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = await getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response

-        async def __call__(self,
-                request: operations_pb2.GetOperationRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-
+        async def __call__(
+            self,
+            request: operations_pb2.GetOperationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
            r"""Call the get operation method over HTTP.

            Args:
@@ -2944,21 +3233,21 @@ async def __call__(self,
             query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request)

             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "GetOperation",
                         "httpRequest": http_request,
@@ -2967,15 +3256,22 @@ async def __call__(self,
             )

             # Send the request
-            response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = await AsyncCloudRedisRestTransport._GetOperation._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )

             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
             if response.status_code >= 400:
                 content = await response.read()
-                payload = json.loads(content.decode('utf-8'))
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                payload = json.loads(content.decode("utf-8"))
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore

             content = await response.read()
@@ -2989,12 +3285,12 @@ async def __call__(self,
                     response_payload = None
                 http_response = {
                     "payload": response_payload,
-                "headers": dict(response.headers),
+                    "headers": dict(response.headers),
                     "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "GetOperation",
                         "httpResponse": http_response,
@@ -3005,7 +3301,7 @@ async def __call__(self,

     @property
     def list_operations(self):
-        return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore
+        return self._ListOperations(self._session, self._host, self._interceptor)  # type: ignore

     class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub):
         def __hash__(self):
@@ -3019,27 +3315,28 @@ async def _get_response(
             session,
             timeout,
             transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = await getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
-                )
+            )
             return response

-        async def __call__(self,
-                request: operations_pb2.ListOperationsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.ListOperationsResponse:
-
+        async def __call__(
+            self,
+            request: operations_pb2.ListOperationsRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.ListOperationsResponse:
            r"""Call the list operations method over HTTP.

            Args:
@@ -3066,21 +3363,21 @@ async def __call__(self,
             query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request)

             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "ListOperations",
                         "httpRequest": http_request,
@@ -3089,15 +3386,22 @@ async def __call__(self,
             )

             # Send the request
-            response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request)
+            response = await AsyncCloudRedisRestTransport._ListOperations._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+            )

             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
             if response.status_code >= 400:
                 content = await response.read()
-                payload = json.loads(content.decode('utf-8'))
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                payload = json.loads(content.decode("utf-8"))
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore

             content = await response.read()
@@ -3111,12 +3415,12 @@ async def __call__(self,
                     response_payload = None
                 http_response = {
                     "payload": response_payload,
-                "headers": dict(response.headers),
+                    "headers": dict(response.headers),
                     "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "ListOperations",
                         "httpResponse": http_response,
@@ -3127,7 +3431,7 @@ async def __call__(self,

     @property
     def wait_operation(self):
-        return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore
+        return self._WaitOperation(self._session, self._host, self._interceptor)  # type: ignore

     class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub):
         def __hash__(self):
@@ -3141,28 +3445,29 @@ async def _get_response(
             session,
             timeout,
             transcoded_request,
-            body=None):
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
             headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
+            headers["Content-Type"] = "application/json"
             response = await getattr(session, method)(
                 "{host}{uri}".format(host=host, uri=uri),
                 timeout=timeout,
                 headers=headers,
                 params=rest_helpers.flatten_query_params(query_params, strict=True),
                 data=body,
-                )
+            )
             return response

-        async def __call__(self,
-                request: operations_pb2.WaitOperationRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, Union[str, bytes]]]=(),
-                ) -> operations_pb2.Operation:
-
+        async def __call__(
+            self,
+            request: operations_pb2.WaitOperationRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ) -> operations_pb2.Operation:
            r"""Call the wait operation method over HTTP.
                Args:
@@ -3191,21 +3496,21 @@ async def __call__(self,
             query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request)

             if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 try:
                     request_payload = json_format.MessageToJson(request)
                 except:
                     request_payload = None
                 http_request = {
-                  "payload": request_payload,
-                  "requestMethod": method,
-                  "requestUrl": request_url,
-                  "headers": dict(metadata),
+                    "payload": request_payload,
+                    "requestMethod": method,
+                    "requestUrl": request_url,
+                    "headers": dict(metadata),
                 }
                 _LOGGER.debug(
                     f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "WaitOperation",
                         "httpRequest": http_request,
@@ -3214,15 +3519,23 @@ async def __call__(self,
             )

             # Send the request
-            response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body)
+            response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(
+                self._host,
+                metadata,
+                query_params,
+                self._session,
+                timeout,
+                transcoded_request,
+                body,
+            )

             # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
             # subclass.
             if response.status_code >= 400:
                 content = await response.read()
-                payload = json.loads(content.decode('utf-8'))
-                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri'])
-                method = transcoded_request['method']
+                payload = json.loads(content.decode("utf-8"))
+                request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"])
+                method = transcoded_request["method"]
                 raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore

             content = await response.read()
@@ -3236,12 +3549,12 @@ async def __call__(self,
                     response_payload = None
                 http_response = {
                     "payload": response_payload,
-                "headers": dict(response.headers),
+                    "headers": dict(response.headers),
                     "status": response.status_code,
                 }
                 _LOGGER.debug(
                     "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation",
-                    extra = {
+                    extra={
                         "serviceName": "google.cloud.redis.v1.CloudRedis",
                         "rpcName": "WaitOperation",
                         "httpResponse": http_response,
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py
index 56df283219..27a51d9471 100755
--- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py
+++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py
@@ -18,7 +18,7 @@
 from google.api_core import gapic_v1

 from google.protobuf import json_format
-from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO

 import re
@@ -42,18 +42,20 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport):
     It sends JSON representations of protocol buffers over HTTP/1.1
     """

-    def __init__(self, *,
-            host: str = 'redis.googleapis.com',
-            credentials:
-            credentials: Optional[Any] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            api_audience: Optional[str] = None,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        host: str = "redis.googleapis.com",
+        credentials: Optional[Any] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        api_audience: Optional[str] = None,
+    ) -> None:
         """Instantiate the transport.

         Args:
             host (Optional[str]):
-                 The hostname to connect to (default: 'redis.googleapis.com').
+                 The hostname to connect to (default: "redis.googleapis.com").
             credentials (Optional[Any]): The authorization
                 credentials to attach to requests. These credentials identify the
                 application to the service; if none
@@ -84,15 +86,18 @@ def __init__(self, *,
             credentials=credentials,
             client_info=client_info,
             always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
+            api_audience=api_audience,
         )

     class _BaseCreateInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
-            "instanceId" : "",        }
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "instanceId": "",
+        }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -100,11 +105,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{parent=projects/*/locations/*}/instances',
-                'body': 'instance',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*/locations/*}/instances",
+                    "body": "instance",
+                },
             ]
             return http_options

@@ -119,16 +125,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params))

             return query_params

@@ -137,8 +146,10 @@ class _BaseDeleteInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -146,10 +157,11 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'delete',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "delete",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}",
+                },
             ]
             return http_options

@@ -161,10 +173,12 @@ def _get_transcoded_request(http_options, request):

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params))

             return query_params

@@ -173,8 +187,10 @@ class _BaseExportInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -182,11 +198,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}:export',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}:export",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -201,16 +218,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseExportInstance._get_unset_required_fields(query_params))

             return query_params

@@ -219,8 +239,10 @@ class _BaseFailoverInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -228,11 +250,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}:failover',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}:failover",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -247,16 +270,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseFailoverInstance._get_unset_required_fields(query_params))

             return query_params

@@ -265,8 +291,10 @@ class _BaseGetInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -274,10 +302,11 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}",
+                },
             ]
             return http_options

@@ -289,10 +318,12 @@ def _get_transcoded_request(http_options, request):

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params))

             return query_params

@@ -301,8 +332,10 @@ class _BaseGetInstanceAuthString:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -310,10 +343,11 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}/authString",
+                },
             ]
             return http_options

@@ -325,10 +359,12 @@ def _get_transcoded_request(http_options, request):

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_unset_required_fields(query_params))

             return query_params

@@ -337,8 +373,10 @@ class _BaseImportInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -346,11 +384,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}:import',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}:import",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -365,16 +404,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseImportInstance._get_unset_required_fields(query_params))

             return query_params

@@ -383,8 +425,10 @@ class _BaseListInstances:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -392,10 +436,11 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{parent=projects/*/locations/*}/instances',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{parent=projects/*/locations/*}/instances",
+                },
             ]
             return http_options

@@ -407,10 +452,12 @@ def _get_transcoded_request(http_options, request):

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params))

             return query_params

@@ -419,8 +466,10 @@ class _BaseRescheduleMaintenance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -428,11 +477,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -447,16 +497,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_unset_required_fields(query_params))

             return query_params

@@ -465,8 +518,11 @@ class _BaseUpdateInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
-            "updateMask" : {},        }
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
+            "updateMask": {},
+        }
+        # fmt: on

         @classmethod
         def
         def _get_unset_required_fields(cls, message_dict):
@@ -474,11 +530,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'patch',
-                'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}',
-                'body': 'instance',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "patch",
+                    "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}",
+                    "body": "instance",
+                },
             ]
             return http_options

@@ -493,16 +550,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params))

             return query_params

@@ -511,8 +571,10 @@ class _BaseUpgradeInstance:
         def __hash__(self):  # pragma: NO COVER
             return NotImplementedError("__hash__ must be implemented.")

-        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] =  {
+        # fmt: off
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {
         }
+        # fmt: on

         @classmethod
         def _get_unset_required_fields(cls, message_dict):
@@ -520,11 +582,12 @@ def _get_unset_required_fields(cls, message_dict):

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/instances/*}:upgrade',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/instances/*}:upgrade",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -539,16 +602,19 @@ def _get_request_body_json(transcoded_request):
             # Jsonify the request body

             body = json_format.MessageToJson(
-                transcoded_request['body'],
-                use_integers_for_enums=False
+                transcoded_request["body"],
+                use_integers_for_enums=False,
             )
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                use_integers_for_enums=False,
-            ))
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=False,
+                )
+            )
             query_params.update(_BaseCloudRedisRestTransport._BaseUpgradeInstance._get_unset_required_fields(query_params))

             return query_params

@@ -559,10 +625,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*}",
+                },
             ]
             return http_options

@@ -570,12 +637,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseListLocations:
@@ -584,10 +653,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*}/locations',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*}/locations",
+                },
             ]
             return http_options

@@ -595,12 +665,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseCancelOperation:
@@ -609,10 +681,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel",
+                },
             ]
             return http_options

@@ -620,12 +693,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseDeleteOperation:
@@ -634,10 +709,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'delete',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "delete",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                },
             ]
             return http_options

@@ -645,12 +721,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseGetOperation:
@@ -659,10 +737,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*/operations/*}',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*/operations/*}",
+                },
             ]
             return http_options

@@ -670,12 +749,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseListOperations:
@@ -684,10 +765,11 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1/{name=projects/*/locations/*}/operations',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "get",
+                    "uri": "/v1/{name=projects/*/locations/*}/operations",
+                },
             ]
             return http_options

@@ -695,12 +777,14 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params

     class _BaseWaitOperation:
@@ -709,11 +793,12 @@ def __hash__(self):  # pragma: NO COVER

         @staticmethod
         def _get_http_options():
-            http_options: List[Dict[str, str]] = [{
-                'method': 'post',
-                'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait',
-                'body': '*',
-            },
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait",
+                    "body": "*",
+                },
             ]
             return http_options

@@ -721,19 +806,20 @@ def _get_http_options():
         def _get_transcoded_request(http_options, request):
             request_kwargs = json_format.MessageToDict(request)
             transcoded_request = path_template.transcode(
-                http_options, **request_kwargs)
+                http_options,
+                **request_kwargs,
+            )
             return transcoded_request

         @staticmethod
         def _get_request_body_json(transcoded_request):
-            body = json.dumps(transcoded_request['body'])
+            body = json.dumps(transcoded_request["body"])
             return body
+
         @staticmethod
         def _get_query_params_json(transcoded_request):
-            query_params = json.loads(json.dumps(transcoded_request['query_params']))
+            query_params = json.loads(json.dumps(transcoded_request["query_params"]))
             return query_params


-__all__=(
-    '_BaseCloudRedisRestTransport',
-)
+__all__ = ("_BaseCloudRedisRestTransport",)
diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py
index 4103832ed2..faaca194d7 100755
--- a/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py
+++ b/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py
@@ -44,31 +44,31 @@
 )

 __all__ = (
-    'CreateInstanceRequest',
-    'DeleteInstanceRequest',
-    'ExportInstanceRequest',
-    'FailoverInstanceRequest',
-    'GcsDestination',
-    'GcsSource',
-    'GetInstanceAuthStringRequest',
-    'GetInstanceRequest',
-    'ImportInstanceRequest',
-    'InputConfig',
-    'Instance',
-    'InstanceAuthString',
-    'ListInstancesRequest',
-    'ListInstancesResponse',
-    'LocationMetadata',
-    'MaintenancePolicy',
-    'MaintenanceSchedule',
-    'NodeInfo',
-    'OperationMetadata',
-    'OutputConfig',
-    'PersistenceConfig',
-    'RescheduleMaintenanceRequest',
-    'TlsCertificate',
-    'UpdateInstanceRequest',
-    'UpgradeInstanceRequest',
-    'WeeklyMaintenanceWindow',
-    'ZoneMetadata',
+    "CreateInstanceRequest",
+    "DeleteInstanceRequest",
+    "ExportInstanceRequest",
+    "FailoverInstanceRequest",
+    "GcsDestination",
+    "GcsSource",
+    "GetInstanceAuthStringRequest",
"GetInstanceRequest", + "ImportInstanceRequest", + "InputConfig", + "Instance", + "InstanceAuthString", + "ListInstancesRequest", + "ListInstancesResponse", + "LocationMetadata", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "OutputConfig", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "TlsCertificate", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "WeeklyMaintenanceWindow", + "ZoneMetadata", ) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 8e6a18eba6..ddd2399745 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -26,38 +26,40 @@ from google.type import timeofday_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.cloud.redis.v1', + package="google.cloud.redis.v1", manifest={ - 'NodeInfo', - 'Instance', - 'PersistenceConfig', - 'RescheduleMaintenanceRequest', - 'MaintenancePolicy', - 'WeeklyMaintenanceWindow', - 'MaintenanceSchedule', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'GetInstanceAuthStringRequest', - 'InstanceAuthString', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'UpgradeInstanceRequest', - 'DeleteInstanceRequest', - 'GcsSource', - 'InputConfig', - 'ImportInstanceRequest', - 'GcsDestination', - 'OutputConfig', - 'ExportInstanceRequest', - 'FailoverInstanceRequest', - 'OperationMetadata', - 'LocationMetadata', - 'ZoneMetadata', - 'TlsCertificate', + "NodeInfo", + "Instance", + "PersistenceConfig", + "RescheduleMaintenanceRequest", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "GetInstanceAuthStringRequest", + "InstanceAuthString", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "UpgradeInstanceRequest", + "DeleteInstanceRequest", + "GcsSource", + "InputConfig", + "ImportInstanceRequest", + "GcsDestination", + "OutputConfig", + "ExportInstanceRequest", + "FailoverInstanceRequest", + "OperationMetadata", + "LocationMetadata", + "ZoneMetadata", + "TlsCertificate", }, ) +# fmt: on class NodeInfo(proto.Message): @@ -266,6 +268,7 @@ class Instance(proto.Message): Optional. The available maintenance versions that an instance could update to. """ + class State(proto.Enum): r"""Represents the different states of a Redis instance. 
@@ -481,34 +484,34 @@ class SuspensionReason(proto.Enum):
         proto.BOOL,
         number=23,
     )
-    server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField(
+    server_ca_certs: MutableSequence["TlsCertificate"] = proto.RepeatedField(
         proto.MESSAGE,
         number=25,
-        message='TlsCertificate',
+        message="TlsCertificate",
     )
     transit_encryption_mode: TransitEncryptionMode = proto.Field(
         proto.ENUM,
         number=26,
         enum=TransitEncryptionMode,
     )
-    maintenance_policy: 'MaintenancePolicy' = proto.Field(
+    maintenance_policy: "MaintenancePolicy" = proto.Field(
         proto.MESSAGE,
         number=27,
-        message='MaintenancePolicy',
+        message="MaintenancePolicy",
     )
-    maintenance_schedule: 'MaintenanceSchedule' = proto.Field(
+    maintenance_schedule: "MaintenanceSchedule" = proto.Field(
         proto.MESSAGE,
         number=28,
-        message='MaintenanceSchedule',
+        message="MaintenanceSchedule",
     )
     replica_count: int = proto.Field(
         proto.INT32,
         number=31,
     )
-    nodes: MutableSequence['NodeInfo'] = proto.RepeatedField(
+    nodes: MutableSequence["NodeInfo"] = proto.RepeatedField(
         proto.MESSAGE,
         number=32,
-        message='NodeInfo',
+        message="NodeInfo",
     )
     read_endpoint: str = proto.Field(
         proto.STRING,
@@ -527,10 +530,10 @@ class SuspensionReason(proto.Enum):
         proto.STRING,
         number=36,
     )
-    persistence_config: 'PersistenceConfig' = proto.Field(
+    persistence_config: "PersistenceConfig" = proto.Field(
         proto.MESSAGE,
         number=37,
-        message='PersistenceConfig',
+        message="PersistenceConfig",
     )
     suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField(
         proto.ENUM,
@@ -572,6 +575,7 @@ class PersistenceConfig(proto.Message):
            future snapshots will be aligned. If not
            provided, the current time will be used.
        """
+
    class PersistenceMode(proto.Enum):
        r"""Available Persistence modes.

@@ -648,6 +652,7 @@ class RescheduleMaintenanceRequest(proto.Message):
            rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC 3339
            format, for example ``2012-11-15T16:19:00.094Z``.
        """
+
    class RescheduleType(proto.Enum):
        r"""Reschedule options.

@@ -720,10 +725,10 @@ class MaintenancePolicy(proto.Message):
         proto.STRING,
         number=3,
     )
-    weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField(
+    weekly_maintenance_window: MutableSequence["WeeklyMaintenanceWindow"] = proto.RepeatedField(
         proto.MESSAGE,
         number=4,
-        message='WeeklyMaintenanceWindow',
+        message="WeeklyMaintenanceWindow",
     )


@@ -869,10 +874,10 @@ class ListInstancesResponse(proto.Message):
     def raw_page(self):
         return self

-    instances: MutableSequence['Instance'] = proto.RepeatedField(
+    instances: MutableSequence["Instance"] = proto.RepeatedField(
         proto.MESSAGE,
         number=1,
-        message='Instance',
+        message="Instance",
     )
     next_page_token: str = proto.Field(
         proto.STRING,
@@ -962,10 +967,10 @@ class CreateInstanceRequest(proto.Message):
         proto.STRING,
         number=2,
     )
-    instance: 'Instance' = proto.Field(
+    instance: "Instance" = proto.Field(
         proto.MESSAGE,
         number=3,
-        message='Instance',
+        message="Instance",
     )


@@ -995,10 +1000,10 @@ class UpdateInstanceRequest(proto.Message):
         number=1,
         message=field_mask_pb2.FieldMask,
     )
-    instance: 'Instance' = proto.Field(
+    instance: "Instance" = proto.Field(
         proto.MESSAGE,
         number=2,
-        message='Instance',
+        message="Instance",
     )


@@ -1071,11 +1076,11 @@ class InputConfig(proto.Message):
            This field is a member of `oneof`_ ``source``.
""" - gcs_source: 'GcsSource' = proto.Field( + gcs_source: "GcsSource" = proto.Field( proto.MESSAGE, number=1, - oneof='source', - message='GcsSource', + oneof="source", + message="GcsSource", ) @@ -1096,10 +1101,10 @@ class ImportInstanceRequest(proto.Message): proto.STRING, number=1, ) - input_config: 'InputConfig' = proto.Field( + input_config: "InputConfig" = proto.Field( proto.MESSAGE, number=3, - message='InputConfig', + message="InputConfig", ) @@ -1132,11 +1137,11 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination: 'GcsDestination' = proto.Field( + gcs_destination: "GcsDestination" = proto.Field( proto.MESSAGE, number=1, - oneof='destination', - message='GcsDestination', + oneof="destination", + message="GcsDestination", ) @@ -1157,10 +1162,10 @@ class ExportInstanceRequest(proto.Message): proto.STRING, number=1, ) - output_config: 'OutputConfig' = proto.Field( + output_config: "OutputConfig" = proto.Field( proto.MESSAGE, number=3, - message='OutputConfig', + message="OutputConfig", ) @@ -1178,6 +1183,7 @@ class FailoverInstanceRequest(proto.Message): choose. If it's unspecified, data protection mode will be LIMITED_DATA_LOSS by default. """ + class DataProtectionMode(proto.Enum): r"""Specifies different modes of operation in relation to the data retention. @@ -1279,18 +1285,17 @@ class LocationMetadata(proto.Message): instance. """ - available_zones: MutableMapping[str, 'ZoneMetadata'] = proto.MapField( + available_zones: MutableMapping[str, "ZoneMetadata"] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, - message='ZoneMetadata', + message="ZoneMetadata", ) class ZoneMetadata(proto.Message): r"""Defines specific information for a particular zone. Currently empty and reserved for future use only. - """ diff --git a/tests/integration/goldens/redis/noxfile.py b/tests/integration/goldens/redis/noxfile.py index c6510a4be3..270887c38c 100755 --- a/tests/integration/goldens/redis/noxfile.py +++ b/tests/integration/goldens/redis/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ae66b5a92c..c025196a2a 100755 --- a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -993,8 +993,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -1006,8 +1006,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
     assert isinstance(response, pagers.ListInstancesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]


 def test_list_instances_non_empty_request_with_auto_populated_field():
@@ -1022,8 +1022,8 @@ def test_list_instances_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.ListInstancesRequest(
-        parent='parent_value',
-        page_token='page_token_value',
+        parent="parent_value",
+        page_token="page_token_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1035,8 +1035,8 @@ def test_list_instances_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.ListInstancesRequest(
-            parent='parent_value',
-            page_token='page_token_value',
+            parent="parent_value",
+            page_token="page_token_value",
         )

 def test_list_instances_use_cached_wrapped_rpc():
@@ -1122,8 +1122,8 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         response = await client.list_instances(request)

@@ -1135,8 +1135,8 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListInstancesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]


 @pytest.mark.asyncio
@@ -1152,7 +1152,7 @@ def test_list_instances_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ListInstancesRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1184,7 +1184,7 @@ async def test_list_instances_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ListInstancesRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1220,7 +1220,7 @@ def test_list_instances_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.list_instances(
-            parent='parent_value',
+            parent="parent_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1228,7 +1228,7 @@ def test_list_instances_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val


@@ -1242,7 +1242,7 @@ def test_list_instances_flattened_error():
     with pytest.raises(ValueError):
         client.list_instances(
             cloud_redis.ListInstancesRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )

 @pytest.mark.asyncio
@@ -1262,7 +1262,7 @@ async def test_list_instances_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_instances(
-            parent='parent_value',
+            parent="parent_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1270,7 +1270,7 @@ async def test_list_instances_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -1284,7 +1284,7 @@ async def test_list_instances_flattened_error_async():
     with pytest.raises(ValueError):
         await client.list_instances(
             cloud_redis.ListInstancesRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )


@@ -1504,33 +1504,33 @@ def test_get_instance(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = cloud_redis.Instance(
-            name='name_value',
-            display_name='display_name_value',
-            location_id='location_id_value',
-            alternative_location_id='alternative_location_id_value',
-            redis_version='redis_version_value',
-            reserved_ip_range='reserved_ip_range_value',
-            secondary_ip_range='secondary_ip_range_value',
-            host='host_value',
+            name="name_value",
+            display_name="display_name_value",
+            location_id="location_id_value",
+            alternative_location_id="alternative_location_id_value",
+            redis_version="redis_version_value",
+            reserved_ip_range="reserved_ip_range_value",
+            secondary_ip_range="secondary_ip_range_value",
+            host="host_value",
             port=453,
-            current_location_id='current_location_id_value',
+            current_location_id="current_location_id_value",
             state=cloud_redis.Instance.State.CREATING,
-            status_message='status_message_value',
+            status_message="status_message_value",
             tier=cloud_redis.Instance.Tier.BASIC,
             memory_size_gb=1499,
-            authorized_network='authorized_network_value',
-            persistence_iam_identity='persistence_iam_identity_value',
+            authorized_network="authorized_network_value",
+            persistence_iam_identity="persistence_iam_identity_value",
             connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
             auth_enabled=True,
             transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION,
             replica_count=1384,
-            read_endpoint='read_endpoint_value',
+            read_endpoint="read_endpoint_value",
             read_endpoint_port=1920,
             read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED,
-            customer_managed_key='customer_managed_key_value',
+            customer_managed_key="customer_managed_key_value",
             suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE],
-            maintenance_version='maintenance_version_value',
-            available_maintenance_versions=['available_maintenance_versions_value'],
+            maintenance_version="maintenance_version_value",
+            available_maintenance_versions=["available_maintenance_versions_value"],
         )
         response = client.get_instance(request)

@@ -1542,33 +1542,33 @@ def test_get_instance(request_type, transport: str = 'grpc'):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.Instance)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.location_id == 'location_id_value'
-    assert response.alternative_location_id == 'alternative_location_id_value'
-    assert response.redis_version == 'redis_version_value'
-    assert response.reserved_ip_range == 'reserved_ip_range_value'
-    assert response.secondary_ip_range == 'secondary_ip_range_value'
-    assert response.host == 'host_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.location_id == "location_id_value"
+    assert response.alternative_location_id == "alternative_location_id_value"
+    assert response.redis_version == "redis_version_value"
+    assert response.reserved_ip_range == "reserved_ip_range_value"
+    assert response.secondary_ip_range == "secondary_ip_range_value"
+    assert response.host == "host_value"
     assert response.port == 453
-    assert response.current_location_id == 'current_location_id_value'
+    assert response.current_location_id == "current_location_id_value"
     assert response.state == cloud_redis.Instance.State.CREATING
-    assert response.status_message == 'status_message_value'
+    assert response.status_message == "status_message_value"
     assert response.tier == cloud_redis.Instance.Tier.BASIC
     assert response.memory_size_gb == 1499
-    assert response.authorized_network == 'authorized_network_value'
-    assert response.persistence_iam_identity == 'persistence_iam_identity_value'
+    assert response.authorized_network == "authorized_network_value"
+    assert response.persistence_iam_identity == "persistence_iam_identity_value"
     assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
     assert response.auth_enabled is True
     assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION
     assert response.replica_count == 1384
-    assert response.read_endpoint == 'read_endpoint_value'
+    assert response.read_endpoint == "read_endpoint_value"
     assert response.read_endpoint_port == 1920
     assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED
-    assert response.customer_managed_key == 'customer_managed_key_value'
+    assert response.customer_managed_key == "customer_managed_key_value"
     assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE]
-    assert response.maintenance_version == 'maintenance_version_value'
-    assert response.available_maintenance_versions == ['available_maintenance_versions_value']
+    assert response.maintenance_version == "maintenance_version_value"
+    assert response.available_maintenance_versions == ["available_maintenance_versions_value"]


 def test_get_instance_non_empty_request_with_auto_populated_field():
@@ -1583,7 +1583,7 @@ def test_get_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.GetInstanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1595,7 +1595,7 @@ def test_get_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.GetInstanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_get_instance_use_cached_wrapped_rpc():
@@ -1681,33 +1681,33 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance(
-            name='name_value',
-            display_name='display_name_value',
-            location_id='location_id_value',
-            alternative_location_id='alternative_location_id_value',
-            redis_version='redis_version_value',
-            reserved_ip_range='reserved_ip_range_value',
-            secondary_ip_range='secondary_ip_range_value',
-            host='host_value',
+            name="name_value",
+            display_name="display_name_value",
+            location_id="location_id_value",
+            alternative_location_id="alternative_location_id_value",
+            redis_version="redis_version_value",
+            reserved_ip_range="reserved_ip_range_value",
+            secondary_ip_range="secondary_ip_range_value",
+            host="host_value",
             port=453,
-            current_location_id='current_location_id_value',
+            current_location_id="current_location_id_value",
             state=cloud_redis.Instance.State.CREATING,
-            status_message='status_message_value',
+            status_message="status_message_value",
             tier=cloud_redis.Instance.Tier.BASIC,
             memory_size_gb=1499,
-            authorized_network='authorized_network_value',
-            persistence_iam_identity='persistence_iam_identity_value',
+            authorized_network="authorized_network_value",
+            persistence_iam_identity="persistence_iam_identity_value",
             connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
             auth_enabled=True,
             transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION,
             replica_count=1384,
-            read_endpoint='read_endpoint_value',
+            read_endpoint="read_endpoint_value",
             read_endpoint_port=1920,
             read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED,
-            customer_managed_key='customer_managed_key_value',
+            customer_managed_key="customer_managed_key_value",
             suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE],
-            maintenance_version='maintenance_version_value',
-            available_maintenance_versions=['available_maintenance_versions_value'],
+            maintenance_version="maintenance_version_value",
+            available_maintenance_versions=["available_maintenance_versions_value"],
         ))
         response = await client.get_instance(request)

@@ -1719,33 +1719,33 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.Instance)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.location_id == 'location_id_value'
-    assert response.alternative_location_id == 'alternative_location_id_value'
-    assert response.redis_version == 'redis_version_value'
-    assert response.reserved_ip_range == 'reserved_ip_range_value'
-    assert response.secondary_ip_range == 'secondary_ip_range_value'
-    assert response.host == 'host_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.location_id == "location_id_value"
+    assert response.alternative_location_id == "alternative_location_id_value"
+    assert response.redis_version == "redis_version_value"
+    assert response.reserved_ip_range == "reserved_ip_range_value"
+    assert response.secondary_ip_range == "secondary_ip_range_value"
+    assert response.host == "host_value"
     assert response.port == 453
-    assert response.current_location_id == 'current_location_id_value'
+    assert response.current_location_id == "current_location_id_value"
     assert response.state == cloud_redis.Instance.State.CREATING
-    assert response.status_message == 'status_message_value'
+    assert response.status_message == "status_message_value"
     assert response.tier == cloud_redis.Instance.Tier.BASIC
     assert response.memory_size_gb == 1499
-    assert response.authorized_network == 'authorized_network_value'
-    assert response.persistence_iam_identity == 'persistence_iam_identity_value'
+    assert response.authorized_network == "authorized_network_value"
+    assert response.persistence_iam_identity == "persistence_iam_identity_value"
     assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
     assert response.auth_enabled is True
     assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION
     assert response.replica_count == 1384
-    assert response.read_endpoint == 'read_endpoint_value'
+    assert response.read_endpoint == "read_endpoint_value"
     assert response.read_endpoint_port == 1920
     assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED
-    assert response.customer_managed_key == 'customer_managed_key_value'
+    assert response.customer_managed_key == "customer_managed_key_value"
     assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE]
-    assert response.maintenance_version == 'maintenance_version_value'
-    assert response.available_maintenance_versions == ['available_maintenance_versions_value']
+    assert response.maintenance_version == "maintenance_version_value"
+    assert response.available_maintenance_versions == ["available_maintenance_versions_value"]


 @pytest.mark.asyncio
@@ -1761,7 +1761,7 @@ def test_get_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.GetInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1793,7 +1793,7 @@ async def test_get_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.GetInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -1829,7 +1829,7 @@ def test_get_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_instance(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1837,7 +1837,7 @@ def test_get_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val


@@ -1851,7 +1851,7 @@ def test_get_instance_flattened_error():
     with pytest.raises(ValueError):
         client.get_instance(
             cloud_redis.GetInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )

 @pytest.mark.asyncio
@@ -1871,7 +1871,7 @@ async def test_get_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_instance(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -1879,7 +1879,7 @@ async def test_get_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -1893,7 +1893,7 @@ async def test_get_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_instance(
             cloud_redis.GetInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -1917,7 +1917,7 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'):
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = cloud_redis.InstanceAuthString(
-            auth_string='auth_string_value',
+            auth_string="auth_string_value",
         )
         response = client.get_instance_auth_string(request)

@@ -1929,7 +1929,7 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.InstanceAuthString)
-    assert response.auth_string == 'auth_string_value'
+    assert response.auth_string == "auth_string_value"


 def test_get_instance_auth_string_non_empty_request_with_auto_populated_field():
@@ -1944,7 +1944,7 @@ def test_get_instance_auth_string_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.GetInstanceAuthStringRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1956,7 +1956,7 @@ def test_get_instance_auth_string_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.GetInstanceAuthStringRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_get_instance_auth_string_use_cached_wrapped_rpc():
@@ -2042,7 +2042,7 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString(
-            auth_string='auth_string_value',
+            auth_string="auth_string_value",
         ))
         response = await client.get_instance_auth_string(request)

@@ -2054,7 +2054,7 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.InstanceAuthString)
-    assert response.auth_string == 'auth_string_value'
+    assert response.auth_string == "auth_string_value"


 @pytest.mark.asyncio
@@ -2070,7 +2070,7 @@ def test_get_instance_auth_string_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.GetInstanceAuthStringRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2102,7 +2102,7 @@ async def test_get_instance_auth_string_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.GetInstanceAuthStringRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2138,7 +2138,7 @@ def test_get_instance_auth_string_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.get_instance_auth_string(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -2146,7 +2146,7 @@ def test_get_instance_auth_string_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val


@@ -2160,7 +2160,7 @@ def test_get_instance_auth_string_flattened_error():
     with pytest.raises(ValueError):
         client.get_instance_auth_string(
             cloud_redis.GetInstanceAuthStringRequest(),
-            name='name_value',
+            name="name_value",
         )

 @pytest.mark.asyncio
@@ -2180,7 +2180,7 @@ async def test_get_instance_auth_string_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.get_instance_auth_string(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -2188,7 +2188,7 @@ async def test_get_instance_auth_string_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -2202,7 +2202,7 @@ async def test_get_instance_auth_string_flattened_error_async():
     with pytest.raises(ValueError):
         await client.get_instance_auth_string(
             cloud_redis.GetInstanceAuthStringRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -2250,8 +2250,8 @@ def test_create_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.CreateInstanceRequest(
-        parent='parent_value',
-        instance_id='instance_id_value',
+        parent="parent_value",
+        instance_id="instance_id_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2263,8 +2263,8 @@ def test_create_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.CreateInstanceRequest(
-            parent='parent_value',
-            instance_id='instance_id_value',
+            parent="parent_value",
+            instance_id="instance_id_value",
         )

 def test_create_instance_use_cached_wrapped_rpc():
@@ -2387,7 +2387,7 @@ def test_create_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.CreateInstanceRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2419,7 +2419,7 @@ async def test_create_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.CreateInstanceRequest()

-    request.parent = 'parent_value'
+    request.parent = "parent_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2455,9 +2455,9 @@ def test_create_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.create_instance(
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -2465,13 +2465,13 @@ def test_create_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].instance_id
-        mock_val = 'instance_id_value'
+        mock_val = "instance_id_value"
         assert arg == mock_val
         arg = args[0].instance
-        mock_val = cloud_redis.Instance(name='name_value')
+        mock_val = cloud_redis.Instance(name="name_value")
         assert arg == mock_val


@@ -2485,9 +2485,9 @@ def test_create_instance_flattened_error():
     with pytest.raises(ValueError):
         client.create_instance(
             cloud_redis.CreateInstanceRequest(),
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )

 @pytest.mark.asyncio
@@ -2509,9 +2509,9 @@ async def test_create_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.create_instance(
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -2519,13 +2519,13 @@ async def test_create_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].parent
-        mock_val = 'parent_value'
+        mock_val = "parent_value"
         assert arg == mock_val
         arg = args[0].instance_id
-        mock_val = 'instance_id_value'
+        mock_val = "instance_id_value"
         assert arg == mock_val
         arg = args[0].instance
-        mock_val = cloud_redis.Instance(name='name_value')
+        mock_val = cloud_redis.Instance(name="name_value")
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -2539,9 +2539,9 @@ async def test_create_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.create_instance(
             cloud_redis.CreateInstanceRequest(),
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )


@@ -2722,7 +2722,7 @@ def test_update_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.UpdateInstanceRequest()

-    request.instance.name = 'name_value'
+    request.instance.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2754,7 +2754,7 @@ async def test_update_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.UpdateInstanceRequest()

-    request.instance.name = 'name_value'
+    request.instance.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -2790,8 +2790,8 @@ def test_update_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.update_instance(
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -2799,10 +2799,10 @@ def test_update_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
         arg = args[0].instance
-        mock_val = cloud_redis.Instance(name='name_value')
+        mock_val = cloud_redis.Instance(name="name_value")
         assert arg == mock_val


@@ -2816,8 +2816,8 @@ def test_update_instance_flattened_error():
     with pytest.raises(ValueError):
         client.update_instance(
             cloud_redis.UpdateInstanceRequest(),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )

 @pytest.mark.asyncio
@@ -2839,8 +2839,8 @@ async def test_update_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.update_instance(
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )

         # Establish that the underlying call was made with the expected
@@ -2848,10 +2848,10 @@ async def test_update_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].update_mask
-        mock_val = field_mask_pb2.FieldMask(paths=['paths_value'])
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
         assert arg == mock_val
         arg = args[0].instance
-        mock_val = cloud_redis.Instance(name='name_value')
+        mock_val = cloud_redis.Instance(name="name_value")
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -2865,8 +2865,8 @@ async def test_update_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.update_instance(
             cloud_redis.UpdateInstanceRequest(),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )


@@ -2914,8 +2914,8 @@ def test_upgrade_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.UpgradeInstanceRequest(
-        name='name_value',
-        redis_version='redis_version_value',
+        name="name_value",
+        redis_version="redis_version_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2927,8 +2927,8 @@ def test_upgrade_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.UpgradeInstanceRequest(
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )

 def test_upgrade_instance_use_cached_wrapped_rpc():
@@ -3051,7 +3051,7 @@ def test_upgrade_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.UpgradeInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3083,7 +3083,7 @@ async def test_upgrade_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.UpgradeInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3119,8 +3119,8 @@ def test_upgrade_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.upgrade_instance(
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )

         # Establish that the underlying call was made with the expected
@@ -3128,10 +3128,10 @@ def test_upgrade_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].redis_version
-        mock_val = 'redis_version_value'
+        mock_val = "redis_version_value"
         assert arg == mock_val


@@ -3145,8 +3145,8 @@ def test_upgrade_instance_flattened_error():
     with pytest.raises(ValueError):
         client.upgrade_instance(
             cloud_redis.UpgradeInstanceRequest(),
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )

 @pytest.mark.asyncio
@@ -3168,8 +3168,8 @@ async def test_upgrade_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.upgrade_instance(
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )

         # Establish that the underlying call was made with the expected
@@ -3177,10 +3177,10 @@ async def test_upgrade_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].redis_version
-        mock_val = 'redis_version_value'
+        mock_val = "redis_version_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -3194,8 +3194,8 @@ async def test_upgrade_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.upgrade_instance(
             cloud_redis.UpgradeInstanceRequest(),
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )


@@ -3243,7 +3243,7 @@ def test_import_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.ImportInstanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3255,7 +3255,7 @@ def test_import_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.ImportInstanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_import_instance_use_cached_wrapped_rpc():
@@ -3378,7 +3378,7 @@ def test_import_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ImportInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3410,7 +3410,7 @@ async def test_import_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ImportInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3446,8 +3446,8 @@ def test_import_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.import_instance(
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )

         # Establish that the underlying call was made with the expected
@@ -3455,10 +3455,10 @@ def test_import_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].input_config
-        mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value'))
+        mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value"))
         assert arg == mock_val


@@ -3472,8 +3472,8 @@ def test_import_instance_flattened_error():
     with pytest.raises(ValueError):
         client.import_instance(
             cloud_redis.ImportInstanceRequest(),
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )

 @pytest.mark.asyncio
@@ -3495,8 +3495,8 @@ async def test_import_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.import_instance(
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )

         # Establish that the underlying call was made with the expected
@@ -3504,10 +3504,10 @@ async def test_import_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].input_config
-        mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value'))
+        mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value"))
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -3521,8 +3521,8 @@ async def test_import_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.import_instance(
             cloud_redis.ImportInstanceRequest(),
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )


@@ -3570,7 +3570,7 @@ def test_export_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.ExportInstanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3582,7 +3582,7 @@ def test_export_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.ExportInstanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_export_instance_use_cached_wrapped_rpc():
@@ -3705,7 +3705,7 @@ def test_export_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ExportInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3737,7 +3737,7 @@ async def test_export_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.ExportInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -3773,8 +3773,8 @@ def test_export_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.export_instance(
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )

         # Establish that the underlying call was made with the expected
@@ -3782,10 +3782,10 @@ def test_export_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].output_config
-        mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value'))
+        mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value"))
         assert arg == mock_val


@@ -3799,8 +3799,8 @@ def test_export_instance_flattened_error():
     with pytest.raises(ValueError):
         client.export_instance(
             cloud_redis.ExportInstanceRequest(),
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )

 @pytest.mark.asyncio
@@ -3822,8 +3822,8 @@ async def test_export_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.export_instance(
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )

         # Establish that the underlying call was made with the expected
@@ -3831,10 +3831,10 @@ async def test_export_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].output_config
-        mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value'))
+        mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value"))
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -3848,8 +3848,8 @@ async def test_export_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.export_instance(
             cloud_redis.ExportInstanceRequest(),
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )


@@ -3897,7 +3897,7 @@ def test_failover_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.FailoverInstanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -3909,7 +3909,7 @@ def test_failover_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.FailoverInstanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_failover_instance_use_cached_wrapped_rpc():
@@ -4032,7 +4032,7 @@ def test_failover_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.FailoverInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4064,7 +4064,7 @@ async def test_failover_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.FailoverInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4100,7 +4100,7 @@ def test_failover_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.failover_instance(
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )

@@ -4109,7 +4109,7 @@ def test_failover_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].data_protection_mode
         mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
@@ -4126,7 +4126,7 @@ def test_failover_instance_flattened_error():
     with pytest.raises(ValueError):
         client.failover_instance(
             cloud_redis.FailoverInstanceRequest(),
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )

@@ -4149,7 +4149,7 @@ async def test_failover_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.failover_instance(
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )

@@ -4158,7 +4158,7 @@ async def test_failover_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].data_protection_mode
         mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
@@ -4175,7 +4175,7 @@ async def test_failover_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.failover_instance(
             cloud_redis.FailoverInstanceRequest(),
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )


@@ -4224,7 +4224,7 @@ def test_delete_instance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.DeleteInstanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4236,7 +4236,7 @@ def test_delete_instance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.DeleteInstanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_delete_instance_use_cached_wrapped_rpc():
@@ -4359,7 +4359,7 @@ def test_delete_instance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.DeleteInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4391,7 +4391,7 @@ async def test_delete_instance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.DeleteInstanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4427,7 +4427,7 @@ def test_delete_instance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.delete_instance(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -4435,7 +4435,7 @@ def test_delete_instance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val


@@ -4449,7 +4449,7 @@ def test_delete_instance_flattened_error():
     with pytest.raises(ValueError):
         client.delete_instance(
             cloud_redis.DeleteInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )

 @pytest.mark.asyncio
@@ -4471,7 +4471,7 @@ async def test_delete_instance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.delete_instance(
-            name='name_value',
+            name="name_value",
         )

         # Establish that the underlying call was made with the expected
@@ -4479,7 +4479,7 @@ async def test_delete_instance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val

 @pytest.mark.asyncio
@@ -4493,7 +4493,7 @@ async def test_delete_instance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.delete_instance(
             cloud_redis.DeleteInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -4541,7 +4541,7 @@ def test_reschedule_maintenance_non_empty_request_with_auto_populated_field():
     # since we want to check that UUID4 are populated automatically
     # if they meet the requirements of AIP 4235.
     request = cloud_redis.RescheduleMaintenanceRequest(
-        name='name_value',
+        name="name_value",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -4553,7 +4553,7 @@ def test_reschedule_maintenance_non_empty_request_with_auto_populated_field():
         call.assert_called()
         _, args, _ = call.mock_calls[0]
         assert args[0] == cloud_redis.RescheduleMaintenanceRequest(
-            name='name_value',
+            name="name_value",
         )

 def test_reschedule_maintenance_use_cached_wrapped_rpc():
@@ -4676,7 +4676,7 @@ def test_reschedule_maintenance_field_headers():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.RescheduleMaintenanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4708,7 +4708,7 @@ async def test_reschedule_maintenance_field_headers_async():
     # a field header. Set these to a non-empty value.
     request = cloud_redis.RescheduleMaintenanceRequest()

-    request.name = 'name_value'
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
@@ -4744,7 +4744,7 @@ def test_reschedule_maintenance_flattened():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         client.reschedule_maintenance(
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )

@@ -4754,7 +4754,7 @@ def test_reschedule_maintenance_flattened():
         assert len(call.mock_calls) == 1
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].reschedule_type
         mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE
@@ -4772,7 +4772,7 @@ def test_reschedule_maintenance_flattened_error():
     with pytest.raises(ValueError):
         client.reschedule_maintenance(
             cloud_redis.RescheduleMaintenanceRequest(),
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )

@@ -4796,7 +4796,7 @@ async def test_reschedule_maintenance_flattened_async():
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.reschedule_maintenance(
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )

@@ -4806,7 +4806,7 @@ async def test_reschedule_maintenance_flattened_async():
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
         arg = args[0].name
-        mock_val = 'name_value'
+        mock_val = "name_value"
         assert arg == mock_val
         arg = args[0].reschedule_type
         mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE
@@ -4824,7 +4824,7 @@ async def test_reschedule_maintenance_flattened_error_async():
     with pytest.raises(ValueError):
         await client.reschedule_maintenance(
             cloud_redis.RescheduleMaintenanceRequest(),
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )


@@ -4883,7 +4883,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan

     # verify required fields with default values are now present

-    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["parent"] = "parent_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request)
     # Check that path parameters and body parameters are not mixing in.
@@ -4892,7 +4892,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan

     # verify required fields with non-default values are left alone
     assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
+    assert jsonified_request["parent"] == "parent_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -4960,7 +4960,7 @@ def test_list_instances_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            parent='parent_value',
+            parent="parent_value",
         )
         mock_args.update(sample_request)

@@ -4994,7 +4994,7 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.list_instances(
             cloud_redis.ListInstancesRequest(),
-            parent='parent_value',
+            parent="parent_value",
         )


@@ -5113,14 +5113,14 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -5188,7 +5188,7 @@ def test_get_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
+            name="name_value",
         )
         mock_args.update(sample_request)

@@ -5222,7 +5222,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.get_instance(
             cloud_redis.GetInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -5279,14 +5279,14 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis.
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -5354,7 +5354,7 @@ def test_get_instance_auth_string_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
+            name="name_value",
         )
         mock_args.update(sample_request)

@@ -5388,7 +5388,7 @@ def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.get_instance_auth_string(
             cloud_redis.GetInstanceAuthStringRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -5453,8 +5453,8 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns
     assert "instanceId" in jsonified_request
     assert jsonified_request["instanceId"] == request_init["instance_id"]

-    jsonified_request["parent"] = 'parent_value'
-    jsonified_request["instanceId"] = 'instance_id_value'
+    jsonified_request["parent"] = "parent_value"
+    jsonified_request["instanceId"] = "instance_id_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request)
     # Check that path parameters and body parameters are not mixing in.
@@ -5463,9 +5463,9 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns

     # verify required fields with non-default values are left alone
     assert "parent" in jsonified_request
-    assert jsonified_request["parent"] == 'parent_value'
+    assert jsonified_request["parent"] == "parent_value"
     assert "instanceId" in jsonified_request
-    assert jsonified_request["instanceId"] == 'instance_id_value'
+    assert jsonified_request["instanceId"] == "instance_id_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -5535,9 +5535,9 @@ def test_create_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )
         mock_args.update(sample_request)

@@ -5569,9 +5569,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.create_instance(
             cloud_redis.CreateInstanceRequest(),
-            parent='parent_value',
-            instance_id='instance_id_value',
-            instance=cloud_redis.Instance(name='name_value'),
+            parent="parent_value",
+            instance_id="instance_id_value",
+            instance=cloud_redis.Instance(name="name_value"),
         )


@@ -5702,8 +5702,8 @@ def test_update_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )
         mock_args.update(sample_request)

@@ -5735,8 +5735,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.update_instance(
             cloud_redis.UpdateInstanceRequest(),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-            instance=cloud_redis.Instance(name='name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+            instance=cloud_redis.Instance(name="name_value"),
         )


@@ -5798,17 +5798,17 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
-    jsonified_request["redisVersion"] = 'redis_version_value'
+    jsonified_request["name"] = "name_value"
+    jsonified_request["redisVersion"] = "redis_version_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"
     assert "redisVersion" in jsonified_request
-    assert jsonified_request["redisVersion"] == 'redis_version_value'
+    assert jsonified_request["redisVersion"] == "redis_version_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -5874,8 +5874,8 @@ def test_upgrade_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )
         mock_args.update(sample_request)

@@ -5907,8 +5907,8 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.upgrade_instance(
             cloud_redis.UpgradeInstanceRequest(),
-            name='name_value',
-            redis_version='redis_version_value',
+            name="name_value",
+            redis_version="redis_version_value",
         )


@@ -5969,14 +5969,14 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -6042,8 +6042,8 @@ def test_import_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )
         mock_args.update(sample_request)

@@ -6075,8 +6075,8 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.import_instance(
             cloud_redis.ImportInstanceRequest(),
-            name='name_value',
-            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')),
+            name="name_value",
+            input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri="uri_value")),
         )


@@ -6137,14 +6137,14 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -6210,8 +6210,8 @@ def test_export_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )
         mock_args.update(sample_request)

@@ -6243,8 +6243,8 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.export_instance(
             cloud_redis.ExportInstanceRequest(),
-            name='name_value',
-            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')),
+            name="name_value",
+            output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri="uri_value")),
         )


@@ -6305,14 +6305,14 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -6378,7 +6378,7 @@ def test_failover_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )
         mock_args.update(sample_request)
@@ -6411,7 +6411,7 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.failover_instance(
             cloud_redis.FailoverInstanceRequest(),
-            name='name_value',
+            name="name_value",
             data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
         )


@@ -6473,14 +6473,14 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -6545,7 +6545,7 @@ def test_delete_instance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
+            name="name_value",
         )
         mock_args.update(sample_request)

@@ -6577,7 +6577,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.delete_instance(
             cloud_redis.DeleteInstanceRequest(),
-            name='name_value',
+            name="name_value",
         )


@@ -6638,14 +6638,14 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re
     # verify required fields with default values are now present

-    jsonified_request["name"] = 'name_value'
+    jsonified_request["name"] = "name_value"

     unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request)
     jsonified_request.update(unset_fields)

     # verify required fields with non-default values are left alone
     assert "name" in jsonified_request
-    assert jsonified_request["name"] == 'name_value'
+    assert jsonified_request["name"] == "name_value"

     client = CloudRedisClient(
         credentials=ga_credentials.AnonymousCredentials(),
@@ -6711,7 +6711,7 @@ def test_reschedule_maintenance_rest_flattened():

         # get truthy value for each flattened field
         mock_args = dict(
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )
@@ -6745,7 +6745,7 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'):
     with pytest.raises(ValueError):
         client.reschedule_maintenance(
             cloud_redis.RescheduleMaintenanceRequest(),
-            name='name_value',
+            name="name_value",
             reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE,
             schedule_time=timestamp_pb2.Timestamp(seconds=751),
         )


@@ -7136,8 +7136,8 @@ async def test_list_instances_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         ))
         await client.list_instances(request=None)

@@ -7164,33 +7164,33 @@ async def test_get_instance_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance(
-            name='name_value',
-            display_name='display_name_value',
-            location_id='location_id_value',
-            alternative_location_id='alternative_location_id_value',
-            redis_version='redis_version_value',
-            reserved_ip_range='reserved_ip_range_value',
-            secondary_ip_range='secondary_ip_range_value',
-            host='host_value',
+            name="name_value",
+            display_name="display_name_value",
+            location_id="location_id_value",
+            alternative_location_id="alternative_location_id_value",
+            redis_version="redis_version_value",
+            reserved_ip_range="reserved_ip_range_value",
+            secondary_ip_range="secondary_ip_range_value",
+            host="host_value",
             port=453,
-            current_location_id='current_location_id_value',
+            current_location_id="current_location_id_value",
             state=cloud_redis.Instance.State.CREATING,
-            status_message='status_message_value',
+            status_message="status_message_value",
             tier=cloud_redis.Instance.Tier.BASIC,
             memory_size_gb=1499,
-            authorized_network='authorized_network_value',
-            persistence_iam_identity='persistence_iam_identity_value',
+            authorized_network="authorized_network_value",
+            persistence_iam_identity="persistence_iam_identity_value",
             connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
             auth_enabled=True,
             transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION,
             replica_count=1384,
-            read_endpoint='read_endpoint_value',
+            read_endpoint="read_endpoint_value",
             read_endpoint_port=1920,
             read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED,
-            customer_managed_key='customer_managed_key_value',
+            customer_managed_key="customer_managed_key_value",
             suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE],
-            maintenance_version='maintenance_version_value',
-            available_maintenance_versions=['available_maintenance_versions_value'],
+            maintenance_version="maintenance_version_value",
+            available_maintenance_versions=["available_maintenance_versions_value"],
         ))
         await client.get_instance(request=None)

@@ -7217,7 +7217,7 @@ async def test_get_instance_auth_string_empty_call_grpc_asyncio():
             '__call__') as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString(
-            auth_string='auth_string_value',
+            auth_string="auth_string_value",
         ))
         await client.get_instance_auth_string(request=None)

@@ -7492,8 +7492,8 @@ def test_list_instances_rest_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.ListInstancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )

         # Wrap the value into a proper Response obj
@@ -7510,8 +7510,8 @@ def test_list_instances_rest_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListInstancesPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]


 @pytest.mark.parametrize("null_interceptor", [True, False])
@@ -7600,33 +7600,33 @@ def test_get_instance_rest_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.Instance(
-            name='name_value',
-            display_name='display_name_value',
-            location_id='location_id_value',
-            alternative_location_id='alternative_location_id_value',
-            redis_version='redis_version_value',
-            reserved_ip_range='reserved_ip_range_value',
-            secondary_ip_range='secondary_ip_range_value',
-            host='host_value',
+            name="name_value",
+            display_name="display_name_value",
+            location_id="location_id_value",
+            alternative_location_id="alternative_location_id_value",
+            redis_version="redis_version_value",
+            reserved_ip_range="reserved_ip_range_value",
+            secondary_ip_range="secondary_ip_range_value",
+            host="host_value",
             port=453,
-            current_location_id='current_location_id_value',
+            current_location_id="current_location_id_value",
             state=cloud_redis.Instance.State.CREATING,
-            status_message='status_message_value',
+            status_message="status_message_value",
             tier=cloud_redis.Instance.Tier.BASIC,
             memory_size_gb=1499,
-            authorized_network='authorized_network_value',
-            persistence_iam_identity='persistence_iam_identity_value',
+            authorized_network="authorized_network_value",
+            persistence_iam_identity="persistence_iam_identity_value",
             connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
             auth_enabled=True,
             transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION,
             replica_count=1384,
-            read_endpoint='read_endpoint_value',
+            read_endpoint="read_endpoint_value",
             read_endpoint_port=1920,
             read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED,
-            customer_managed_key='customer_managed_key_value',
+            customer_managed_key="customer_managed_key_value",
             suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE],
-            maintenance_version='maintenance_version_value',
-            available_maintenance_versions=['available_maintenance_versions_value'],
+            maintenance_version="maintenance_version_value",
+            available_maintenance_versions=["available_maintenance_versions_value"],
         )

         # Wrap the value into a proper Response obj
@@ -7643,33 +7643,33 @@ def test_get_instance_rest_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.Instance)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.location_id == 'location_id_value'
-    assert response.alternative_location_id == 'alternative_location_id_value'
-    assert response.redis_version == 'redis_version_value'
-    assert response.reserved_ip_range == 'reserved_ip_range_value'
-    assert response.secondary_ip_range == 'secondary_ip_range_value'
-    assert response.host == 'host_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.location_id == "location_id_value"
+    assert response.alternative_location_id == "alternative_location_id_value"
+    assert response.redis_version == "redis_version_value"
+    assert response.reserved_ip_range == "reserved_ip_range_value"
+    assert response.secondary_ip_range == "secondary_ip_range_value"
+    assert response.host == "host_value"
     assert response.port == 453
-    assert response.current_location_id == 'current_location_id_value'
+    assert response.current_location_id == "current_location_id_value"
     assert response.state == cloud_redis.Instance.State.CREATING
-    assert response.status_message == 'status_message_value'
+    assert response.status_message == "status_message_value"
     assert response.tier == cloud_redis.Instance.Tier.BASIC
     assert response.memory_size_gb == 1499
-    assert response.authorized_network == 'authorized_network_value'
-    assert response.persistence_iam_identity == 'persistence_iam_identity_value'
+    assert response.authorized_network == "authorized_network_value"
+    assert response.persistence_iam_identity == "persistence_iam_identity_value"
     assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
     assert response.auth_enabled is True
     assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION
     assert response.replica_count == 1384
-    assert response.read_endpoint == 'read_endpoint_value'
+    assert response.read_endpoint == "read_endpoint_value"
     assert response.read_endpoint_port == 1920
     assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED
-    assert response.customer_managed_key == 'customer_managed_key_value'
+    assert response.customer_managed_key == "customer_managed_key_value"
     assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE]
-    assert response.maintenance_version == 'maintenance_version_value'
-    assert response.available_maintenance_versions == ['available_maintenance_versions_value']
+    assert response.maintenance_version == "maintenance_version_value"
+    assert response.available_maintenance_versions == ["available_maintenance_versions_value"]


 @pytest.mark.parametrize("null_interceptor", [True, False])
@@ -7758,7 +7758,7 @@ def test_get_instance_auth_string_rest_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.InstanceAuthString(
-            auth_string='auth_string_value',
+            auth_string="auth_string_value",
         )

         # Wrap the value into a proper Response obj
@@ -7775,7 +7775,7 @@ def test_get_instance_auth_string_rest_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.InstanceAuthString)
-    assert response.auth_string == 'auth_string_value'
+    assert response.auth_string == "auth_string_value"


 @pytest.mark.parametrize("null_interceptor", [True, False])
@@ -9450,8 +9450,8 @@ async def test_list_instances_rest_asyncio_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.ListInstancesResponse(
-            next_page_token='next_page_token_value',
-            unreachable=['unreachable_value'],
+            next_page_token="next_page_token_value",
+            unreachable=["unreachable_value"],
         )

         # Wrap the value into a proper Response obj
@@ -9468,8 +9468,8 @@ async def test_list_instances_rest_asyncio_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListInstancesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-    assert response.unreachable == ['unreachable_value']
+    assert response.next_page_token == "next_page_token_value"
+    assert response.unreachable == ["unreachable_value"]


 @pytest.mark.asyncio
@@ -9565,33 +9565,33 @@ async def test_get_instance_rest_asyncio_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.Instance(
-            name='name_value',
-            display_name='display_name_value',
-            location_id='location_id_value',
-            alternative_location_id='alternative_location_id_value',
-            redis_version='redis_version_value',
-            reserved_ip_range='reserved_ip_range_value',
-            secondary_ip_range='secondary_ip_range_value',
-            host='host_value',
+            name="name_value",
+            display_name="display_name_value",
+            location_id="location_id_value",
+            alternative_location_id="alternative_location_id_value",
+            redis_version="redis_version_value",
+            reserved_ip_range="reserved_ip_range_value",
+            secondary_ip_range="secondary_ip_range_value",
+            host="host_value",
             port=453,
-            current_location_id='current_location_id_value',
+            current_location_id="current_location_id_value",
             state=cloud_redis.Instance.State.CREATING,
-            status_message='status_message_value',
+            status_message="status_message_value",
             tier=cloud_redis.Instance.Tier.BASIC,
             memory_size_gb=1499,
-            authorized_network='authorized_network_value',
-            persistence_iam_identity='persistence_iam_identity_value',
+            authorized_network="authorized_network_value",
+            persistence_iam_identity="persistence_iam_identity_value",
             connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
             auth_enabled=True,
             transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION,
             replica_count=1384,
-            read_endpoint='read_endpoint_value',
+            read_endpoint="read_endpoint_value",
             read_endpoint_port=1920,
             read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED,
-            customer_managed_key='customer_managed_key_value',
+            customer_managed_key="customer_managed_key_value",
             suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE],
-            maintenance_version='maintenance_version_value',
-            available_maintenance_versions=['available_maintenance_versions_value'],
+            maintenance_version="maintenance_version_value",
+            available_maintenance_versions=["available_maintenance_versions_value"],
         )

         # Wrap the value into a proper Response obj
@@ -9608,33 +9608,33 @@ async def test_get_instance_rest_asyncio_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.Instance)
-    assert response.name == 'name_value'
-    assert response.display_name == 'display_name_value'
-    assert response.location_id == 'location_id_value'
-    assert response.alternative_location_id == 'alternative_location_id_value'
-    assert response.redis_version == 'redis_version_value'
-    assert response.reserved_ip_range == 'reserved_ip_range_value'
-    assert response.secondary_ip_range == 'secondary_ip_range_value'
-    assert response.host == 'host_value'
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.location_id == "location_id_value"
+    assert response.alternative_location_id == "alternative_location_id_value"
+    assert response.redis_version == "redis_version_value"
+    assert response.reserved_ip_range == "reserved_ip_range_value"
+    assert response.secondary_ip_range == "secondary_ip_range_value"
+    assert response.host == "host_value"
     assert response.port == 453
-    assert response.current_location_id == 'current_location_id_value'
+    assert response.current_location_id == "current_location_id_value"
     assert response.state == cloud_redis.Instance.State.CREATING
-    assert response.status_message == 'status_message_value'
+    assert response.status_message == "status_message_value"
     assert response.tier == cloud_redis.Instance.Tier.BASIC
     assert response.memory_size_gb == 1499
-    assert response.authorized_network == 'authorized_network_value'
-    assert response.persistence_iam_identity == 'persistence_iam_identity_value'
+    assert response.authorized_network == "authorized_network_value"
+    assert response.persistence_iam_identity == "persistence_iam_identity_value"
     assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
     assert response.auth_enabled is True
     assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION
     assert response.replica_count == 1384
-    assert response.read_endpoint == 'read_endpoint_value'
+    assert response.read_endpoint == "read_endpoint_value"
     assert response.read_endpoint_port == 1920
     assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED
-    assert response.customer_managed_key == 'customer_managed_key_value'
+    assert response.customer_managed_key == "customer_managed_key_value"
     assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE]
-    assert response.maintenance_version == 'maintenance_version_value'
-    assert response.available_maintenance_versions == ['available_maintenance_versions_value']
+    assert response.maintenance_version == "maintenance_version_value"
+    assert response.available_maintenance_versions == ["available_maintenance_versions_value"]


 @pytest.mark.asyncio
@@ -9730,7 +9730,7 @@ async def test_get_instance_auth_string_rest_asyncio_call_success(request_type):
     with mock.patch.object(type(client.transport._session), 'request') as req:
         # Designate an appropriate value for the returned response.
         return_value = cloud_redis.InstanceAuthString(
-            auth_string='auth_string_value',
+            auth_string="auth_string_value",
        )

         # Wrap the value into a proper Response obj
@@ -9747,7 +9747,7 @@ async def test_get_instance_auth_string_rest_asyncio_call_success(request_type):

     # Establish that the response is the type that we expect.
     assert isinstance(response, cloud_redis.InstanceAuthString)
-    assert response.auth_string == 'auth_string_value'
+    assert response.auth_string == "auth_string_value"


 @pytest.mark.asyncio
diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py
index 045bcae4c5..5ed4d010c5 100755
--- a/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py
+++ b/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py
@@ -36,20 +36,21 @@ from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest
 from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow

-__all__ = ('CloudRedisClient',
-    'CloudRedisAsyncClient',
-    'CreateInstanceRequest',
-    'DeleteInstanceRequest',
-    'GetInstanceRequest',
-    'Instance',
-    'ListInstancesRequest',
-    'ListInstancesResponse',
-    'MaintenancePolicy',
-    'MaintenanceSchedule',
-    'NodeInfo',
-    'OperationMetadata',
-    'PersistenceConfig',
-    'TlsCertificate',
-    'UpdateInstanceRequest',
-    'WeeklyMaintenanceWindow',
+__all__ = (
+    "CloudRedisClient",
+    "CloudRedisAsyncClient",
+    "CreateInstanceRequest",
+    "DeleteInstanceRequest",
+    "GetInstanceRequest",
+    "Instance",
+    "ListInstancesRequest",
+    "ListInstancesResponse",
+    "MaintenancePolicy",
+    "MaintenanceSchedule",
+    "NodeInfo",
+    "OperationMetadata",
+    "PersistenceConfig",
+    "TlsCertificate",
+    "UpdateInstanceRequest",
+    "WeeklyMaintenanceWindow",
 )
diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py
index 1f7bad3796..9841d08c71 100755
--- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py
+++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py
@@ -46,10 +46,10 @@ from .types.cloud_redis import UpdateInstanceRequest
 from .types.cloud_redis import WeeklyMaintenanceWindow

-if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER
-    api_core.check_python_version("google.cloud.redis_v1") # type: ignore
-    api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore
-else: # pragma: NO COVER
+if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"):  # pragma: NO COVER
+    api_core.check_python_version("google.cloud.redis_v1")  # type: ignore
+    api_core.check_dependency_versions("google.cloud.redis_v1")  # type: ignore
+else:  # pragma: NO COVER
     # An older version of api_core is installed which does not define the
     # functions above. We do equivalent checks manually.
     try:
@@ -59,20 +59,24 @@
         _py_version_str = sys.version.split()[0]
         _package_label = "google.cloud.redis_v1"
         if sys.version_info < (3, 9):
-            warnings.warn("You are using a non-supported Python version " +
-                          f"({_py_version_str}). Google will not post any further " +
-                          f"updates to {_package_label} supporting this Python version. " +
-                          "Please upgrade to the latest Python version, or at " +
-                          f"least to Python 3.9, and then update {_package_label}.",
-                          FutureWarning)
+            warnings.warn(
+                "You are using a non-supported Python version " +
+                f"({_py_version_str}). Google will not post any further " +
+                f"updates to {_package_label} supporting this Python version. " +
+                "Please upgrade to the latest Python version, or at " +
+                f"least to Python 3.9, and then update {_package_label}.",
+                FutureWarning,
+            )
         if sys.version_info[:2] == (3, 9):
-            warnings.warn(f"You are using a Python version ({_py_version_str}) " +
-                          f"which Google will stop supporting in {_package_label} in " +
-                          "January 2026. Please " +
-                          "upgrade to the latest Python version, or at " +
-                          "least to Python 3.10, before then, and " +
-                          f"then update {_package_label}.",
-                          FutureWarning)
+            warnings.warn(
+                f"You are using a Python version ({_py_version_str}) " +
+                f"which Google will stop supporting in {_package_label} in " +
+                "January 2026. Please " +
+                "upgrade to the latest Python version, or at " +
+                "least to Python 3.10, before then, and " +
+                f"then update {_package_label}.",
+                FutureWarning,
+            )

         def parse_version_to_tuple(version_string: str):
             """Safely converts a semantic version string to a comparable tuple of integers.
@@ -110,41 +114,45 @@ def _get_version(dependency_name):
             _recommendation = " (we recommend 6.x)"
             (_version_used, _version_used_string) = _get_version(_dependency_package)
             if _version_used and _version_used < _next_supported_version_tuple:
-                warnings.warn(f"Package {_package_label} depends on " +
-                              f"{_dependency_package}, currently installed at version " +
-                              f"{_version_used_string}. Future updates to " +
-                              f"{_package_label} will require {_dependency_package} at " +
-                              f"version {_next_supported_version} or higher{_recommendation}." +
-                              " Please ensure " +
-                              "that either (a) your Python environment doesn't pin the " +
-                              f"version of {_dependency_package}, so that updates to " +
-                              f"{_package_label} can require the higher version, or " +
-                              "(b) you manually update your Python environment to use at " +
-                              f"least version {_next_supported_version} of " +
-                              f"{_dependency_package}.",
-                              FutureWarning)
+                warnings.warn(
+                    f"Package {_package_label} depends on " +
+                    f"{_dependency_package}, currently installed at version " +
+                    f"{_version_used_string}. Future updates to " +
+                    f"{_package_label} will require {_dependency_package} at " +
+                    f"version {_next_supported_version} or higher{_recommendation}." +
+                    " Please ensure " +
+                    "that either (a) your Python environment doesn't pin the " +
+                    f"version of {_dependency_package}, so that updates to " +
+                    f"{_package_label} can require the higher version, or " +
+                    "(b) you manually update your Python environment to use at " +
+                    f"least version {_next_supported_version} of " +
+                    f"{_dependency_package}.",
+                    FutureWarning,
+                )
     except Exception:
-        warnings.warn("Could not determine the version of Python " +
-                      "currently being used. To continue receiving " +
-                      "updates for {_package_label}, ensure you are " +
-                      "using a supported version of Python; see " +
-                      "https://devguide.python.org/versions/")
+        warnings.warn(
+            "Could not determine the version of Python " +
+            "currently being used. To continue receiving " +
+            "updates for {_package_label}, ensure you are " +
+            "using a supported version of Python; see " +
+            "https://devguide.python.org/versions/"
+        )

 __all__ = (
-    'CloudRedisAsyncClient',
-'CloudRedisClient',
-'CreateInstanceRequest',
-'DeleteInstanceRequest',
-'GetInstanceRequest',
-'Instance',
-'ListInstancesRequest',
-'ListInstancesResponse',
-'MaintenancePolicy',
-'MaintenanceSchedule',
-'NodeInfo',
-'OperationMetadata',
-'PersistenceConfig',
-'TlsCertificate',
-'UpdateInstanceRequest',
-'WeeklyMaintenanceWindow',
+    "CloudRedisAsyncClient",
+    "CloudRedisClient",
+    "CreateInstanceRequest",
+    "DeleteInstanceRequest",
+    "GetInstanceRequest",
+    "Instance",
+    "ListInstancesRequest",
+    "ListInstancesResponse",
+    "MaintenancePolicy",
+    "MaintenanceSchedule",
+    "NodeInfo",
+    "OperationMetadata",
+    "PersistenceConfig",
+    "TlsCertificate",
+    "UpdateInstanceRequest",
+    "WeeklyMaintenanceWindow",
 )
diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py
index f302df64f1..916eb4e24c 100755
--- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py
+++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py
@@ -17,6 +17,6 @@
 from .async_client import CloudRedisAsyncClient

 __all__ = (
-    'CloudRedisClient',
-    'CloudRedisAsyncClient',
+    "CloudRedisClient",
+    "CloudRedisAsyncClient",
 )
diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py
index 6e826449ce..5ecd895e9f 100755
--- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py
+++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py
@@ -16,7 +16,18 @@
 import logging as std_logging
 from collections import OrderedDict
 import re
-from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union
+from typing import (
+    Dict,
+    Callable,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)

 from google.cloud.redis_v1 import gapic_version as package_version

@@ -24,8 +35,8 @@ from google.api_core import exceptions as core_exceptions
 from google.api_core import gapic_v1
 from google.api_core import retry_async as retries
-from google.auth import credentials as ga_credentials # type: ignore
-from google.oauth2 import service_account # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
 import google.protobuf

@@ -36,10 +47,10 @@ from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
-from google.cloud.location import locations_pb2 # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from google.cloud.redis_v1.services.cloud_redis import pagers
 from google.cloud.redis_v1.types import cloud_redis
-from google.longrunning import operations_pb2 # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
 from google.protobuf import timestamp_pb2  # type: ignore
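The version-check hunks above only re-wrap the warnings; the comparison itself leans on parse_version_to_tuple, whose docstring is visible in the context lines. A hypothetical sketch of such a helper (the generated body is not shown in this diff, so the implementation below is an assumption, not the golden's code):

    def parse_version_to_tuple(version_string: str):
        """Safely converts a semantic version string to a comparable tuple of integers."""
        try:
            # "4.25.3" -> (4, 25, 3), so tuple comparison against a minimum works.
            return tuple(int(part) for part in version_string.split(".")[:3])
        except ValueError:
            # Hypothetical fallback for unparsable versions; the real golden may differ.
            return tuple()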
@@ -49,12 +60,14 @@ try:
     from google.api_core import client_logging  # type: ignore
+
     CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
 except ImportError:  # pragma: NO COVER
     CLIENT_LOGGING_SUPPORTED = False

 _LOGGER = std_logging.getLogger(__name__)

+
 class CloudRedisAsyncClient:
     """Configures and manages Cloud Memorystore for Redis instances

@@ -198,12 +211,14 @@ def universe_domain(self) -> str:

     get_transport_class = CloudRedisClient.get_transport_class

-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
         """Instantiates the cloud redis async client.

         Args:
@@ -261,31 +276,33 @@ def __init__(self, *,
             transport=transport,
             client_options=client_options,
             client_info=client_info,
-
         )

         if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
             _LOGGER.debug(
                 "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.",
-                extra = {
+                extra={
                     "serviceName": "google.cloud.redis.v1.CloudRedis",
                     "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""),
                     "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
                     "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
-                } if hasattr(self._client._transport, "_credentials") else {
+                }
+                if hasattr(self._client._transport, "_credentials")
+                else {
                     "serviceName": "google.cloud.redis.v1.CloudRedis",
                     "credentialsType": None,
-                }
+                },
             )

-    async def list_instances(self,
-            request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> pagers.ListInstancesAsyncPager:
+    async def list_instances(
+        self,
+        request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListInstancesAsyncPager:
         r"""Lists all Redis instances owned by a project in
         either the specified location (region) or all
         locations.
@@ -360,8 +377,7 @@ async def sample_list_instances():
         flattened_params = [parent]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -379,11 +395,13 @@ async def sample_list_instances():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("parent", request.parent),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._client._validate_universe_domain()
@@ -410,14 +428,15 @@ async def sample_list_instances():
         # Done; return the response.
         return response

-    async def get_instance(self,
-            request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> cloud_redis.Instance:
+    async def get_instance(
+        self,
+        request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> cloud_redis.Instance:
         r"""Gets the details of a specific Redis instance.

         .. code-block:: python
@@ -476,8 +495,7 @@ async def sample_get_instance():
         flattened_params = [name]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -495,11 +513,13 @@ async def sample_get_instance():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("name", request.name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._client._validate_universe_domain()
@@ -515,16 +535,17 @@ async def sample_get_instance():
         # Done; return the response.
         return response

-    async def create_instance(self,
-            request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None,
-            *,
-            parent: Optional[str] = None,
-            instance_id: Optional[str] = None,
-            instance: Optional[cloud_redis.Instance] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
+    async def create_instance(
+        self,
+        request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None,
+        *,
+        parent: Optional[str] = None,
+        instance_id: Optional[str] = None,
+        instance: Optional[cloud_redis.Instance] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operation_async.AsyncOperation:
         r"""Creates a Redis instance based on the specified tier and
         memory size.
@@ -632,8 +653,7 @@ async def sample_create_instance():
         flattened_params = [parent, instance_id, instance]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -655,11 +675,13 @@ async def sample_create_instance():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("parent", request.parent),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._client._validate_universe_domain()
@@ -683,15 +705,16 @@ async def sample_create_instance():
         # Done; return the response.
         return response

-    async def update_instance(self,
-            request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None,
-            *,
-            update_mask: Optional[field_mask_pb2.FieldMask] = None,
-            instance: Optional[cloud_redis.Instance] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
+    async def update_instance(
+        self,
+        request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None,
+        *,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        instance: Optional[cloud_redis.Instance] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operation_async.AsyncOperation:
         r"""Updates the metadata and configuration of a specific
         Redis instance.

         Completed longrunning.Operation will contain the new
@@ -783,8 +806,7 @@ async def sample_update_instance():
         flattened_params = [update_mask, instance]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -804,11 +826,13 @@ async def sample_update_instance():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("instance.name", request.instance.name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._client._validate_universe_domain()
@@ -832,14 +856,15 @@ async def sample_update_instance():
         # Done; return the response.
         return response

-    async def delete_instance(self,
-            request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None,
-            *,
-            name: Optional[str] = None,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
-            ) -> operation_async.AsyncOperation:
+    async def delete_instance(
+        self,
+        request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> operation_async.AsyncOperation:
         r"""Deletes a specific Redis instance.
         Instance stops serving and data is deleted.
@@ -915,8 +940,7 @@ async def sample_delete_instance():
         flattened_params = [name]
         has_flattened_params = len([param for param in flattened_params if param is not None]) > 0
         if request is not None and has_flattened_params:
-            raise ValueError("If the `request` argument is set, then none of "
-                             "the individual field arguments should be set.")
+            raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.")

         # - Use the request object if provided (there's no risk of modifying the input as
         #   there are no flattened fields), or create one.
@@ -934,11 +958,13 @@ async def sample_delete_instance():

         # Certain fields should be provided within the metadata header;
         # add these here.
+        # fmt: off
         metadata = tuple(metadata) + (
             gapic_v1.routing_header.to_grpc_metadata((
                 ("name", request.name),
             )),
         )
+        # fmt: on

         # Validate the universe domain.
         self._client._validate_universe_domain()
@@ -999,17 +1025,18 @@ async def list_operations(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1051,17 +1078,18 @@ async def get_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1107,16 +1135,18 @@ async def delete_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

     async def cancel_operation(
         self,
@@ -1158,16 +1188,18 @@ async def cancel_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
-        await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

     async def wait_operation(
         self,
@@ -1212,17 +1244,18 @@ async def wait_operation(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1264,17 +1297,18 @@ async def get_location(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
         return response
@@ -1316,17 +1350,18 @@ async def list_locations(

         # Certain fields should be provided within the metadata header;
         # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata(
-                (("name", request.name),)),
-        )
+        metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),)

         # Validate the universe domain.
         self._client._validate_universe_domain()

         # Send the request.
         response = await rpc(
-            request, retry=retry, timeout=timeout, metadata=metadata,)
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )

         # Done; return the response.
return response @@ -1337,12 +1372,11 @@ async def __aenter__(self) -> "CloudRedisAsyncClient": async def __aexit__(self, exc_type, exc, tb): await self.transport.close() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisAsyncClient", -) +__all__ = ("CloudRedisAsyncClient",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index 32c43e15f1..736ba0373f 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -19,7 +19,19 @@ import logging as std_logging import os import re -from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import warnings from google.cloud.redis_v1 import gapic_version as package_version @@ -28,11 +40,11 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf try: @@ -42,6 +54,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -50,10 +63,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -61,10 +74,12 @@ from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport + try: from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True -except ImportError as e: # pragma: NO COVER +except ImportError as e: # pragma: 
NO COVER HAS_ASYNC_REST_DEPENDENCIES = False ASYNC_REST_EXCEPTION = e @@ -76,6 +91,7 @@ class CloudRedisClientMeta(type): support objects (e.g. transport) without polluting the client instance objects. """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport @@ -83,9 +99,10 @@ class CloudRedisClientMeta(type): if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[CloudRedisTransport]: + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[CloudRedisTransport]: """Returns an appropriate transport class. Args: @@ -144,9 +161,7 @@ def _get_default_mtls_endpoint(api_endpoint): if not api_endpoint: return api_endpoint - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) + mtls_endpoint_re = re.compile(r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?") m = mtls_endpoint_re.match(api_endpoint) name, mtls, sandbox, googledomain = m.groups() @@ -155,16 +170,15 @@ def _get_default_mtls_endpoint(api_endpoint): if sandbox: return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + "sandbox.googleapis.com", + "mtls.sandbox.googleapis.com", ) return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "redis.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(DEFAULT_ENDPOINT) # type: ignore _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" @@ -178,21 +192,19 @@ def _use_client_cert_effective(): Returns: bool: whether client certificate should be used for mTLS + Raises: - ValueError: (If using a version of google-auth without should_use_client_cert and - GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + ValueError: If using a version of google-auth without should_use_client_cert + and GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value. """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER return mtls.should_use_client_cert() - else: # pragma: NO COVER + else: # pragma: NO COVER # if unsupported, fallback to reading from env var use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() if use_client_cert_str not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" - " either `true` or `false`" - ) + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") return use_client_cert_str == "true" @classmethod @@ -227,7 +239,8 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): CloudRedisClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file( - filename) + filename, + ) kwargs["credentials"] = credentials return cls(*args, **kwargs) @@ -244,73 +257,118 @@ def transport(self) -> CloudRedisTransport: return self._transport @staticmethod - def instance_path(project: str,location: str,instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" - return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + return "projects/{project}/locations/{location}/instances/{instance}".format( + project=project, + location=location, + instance=instance, + ) @staticmethod - def parse_instance_path(path: str) -> Dict[str,str]: + def parse_instance_path( + path: str, + ) -> Dict[str, str]: """Parses a instance path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: + def parse_common_billing_account_path( + path: str, + ) -> Dict[str, str]: """Parse a billing_account path into its component segments.""" m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str, ) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: + def parse_common_folder_path( + path: str, + ) -> Dict[str, str]: """Parse a folder path into its component segments.""" m = re.match(r"^folders/(?P<folder>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str, ) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: + def parse_common_organization_path( + path: str, + ) -> Dict[str, str]: """Parse a organization path into its component segments.""" m = re.match(r"^organizations/(?P<organization>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str, ) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) + return "projects/{project}".format( + project=project, + ) @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: + def parse_common_project_path( + path: str, + ) -> Dict[str, str]: """Parse a project path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)$", path) return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, 
location: str, ) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: + def parse_common_location_path( + path: str, + ) -> Dict[str, str]: """Parse a location path into its component segments.""" m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) return m.groupdict() if m else {} @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + def get_mtls_endpoint_and_cert_source( + cls, + client_options: Optional[client_options_lib.ClientOptions] = None, + ): """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: @@ -342,8 +400,10 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", - DeprecationWarning) + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = CloudRedisClient._use_client_cert_effective() @@ -473,7 +533,7 @@ def _validate_universe_domain(self): def _add_cred_info_for_auth_errors( self, - error: core_exceptions.GoogleAPICallError + error: core_exceptions.GoogleAPICallError, ) -> None: """Adds credential info string to error details for 401/403/404 errors. @@ -513,12 +573,14 @@ def universe_domain(self) -> str: """ return self._universe_domain - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiates the cloud redis client. 
Args: @@ -578,12 +640,12 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + universe_domain_opt = getattr(self._client_options, "universe_domain", None) self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) - self._api_endpoint = None # updated below, depending on `transport` + self._api_endpoint = None # updated below, depending on `transport` # Initialize the universe domain validation. self._is_universe_domain_valid = False @@ -603,22 +665,22 @@ def __init__(self, *, if transport_provided: # transport is a CloudRedisTransport instance. if credentials or self._client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") + raise ValueError( + "When providing a transport instance, provide its credentials directly.", + ) if self._client_options.scopes: raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." + "When providing a transport instance, provide its scopes directly.", ) self._transport = cast(CloudRedisTransport, transport) self._api_endpoint = self._transport.host - self._api_endpoint = (self._api_endpoint or - CloudRedisClient._get_api_endpoint( - self._client_options.api_endpoint, - self._client_cert_source, - self._universe_domain, - self._use_mtls_endpoint)) + self._api_endpoint = self._api_endpoint or CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) if not transport_provided: transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( @@ -634,7 +696,6 @@ def __init__(self, *, "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, - } provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] if provided_unsupported_params: @@ -670,25 +731,28 @@ def __init__(self, *, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - } if hasattr(self._transport, "_credentials") else { + } + if hasattr(self._transport, "_credentials") + else { "serviceName": "google.cloud.redis.v1.CloudRedis", "credentialsType": None, - } + }, ) - def list_instances(self, - request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: 
OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListInstancesPager: + def list_instances( + self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -763,8 +827,7 @@ def sample_list_instances(): flattened_params = [parent] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -781,11 +844,13 @@ def sample_list_instances(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -812,14 +877,15 @@ def sample_list_instances(): # Done; return the response. return response - def get_instance(self, - request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> cloud_redis.Instance: + def get_instance( + self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. .. code-block:: python @@ -878,8 +944,7 @@ def sample_get_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -896,11 +961,13 @@ def sample_get_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -916,16 +983,17 @@ def sample_get_instance(): # Done; return the response. 
return response - def create_instance(self, - request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, - *, - parent: Optional[str] = None, - instance_id: Optional[str] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def create_instance( + self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1033,8 +1101,7 @@ def sample_create_instance(): flattened_params = [parent, instance_id, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1055,11 +1122,13 @@ def sample_create_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("parent", request.parent), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1083,15 +1152,16 @@ def sample_create_instance(): # Done; return the response. return response - def update_instance(self, - request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, - *, - update_mask: Optional[field_mask_pb2.FieldMask] = None, - instance: Optional[cloud_redis.Instance] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def update_instance( + self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. Completed longrunning.Operation will contain the new @@ -1183,8 +1253,7 @@ def sample_update_instance(): flattened_params = [update_mask, instance] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
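A minimal, self-contained sketch of the routing-header pattern that the `# fmt: off` / `# fmt: on` guards in these hunks protect: the guards keep an autoformatter from collapsing the tuple-of-tuples onto a single line. The resource name below is illustrative, and only `google-api-core` is assumed to be installed:

from google.api_core import gapic_v1

metadata = ()  # caller-supplied metadata, if any

# fmt: off
metadata = tuple(metadata) + (
    gapic_v1.routing_header.to_grpc_metadata((
        ("name", "projects/my-project/locations/us-central1/instances/my-instance"),
    )),
)
# fmt: on

# to_grpc_metadata percent-encodes the pairs into a single header entry,
# roughly ('x-goog-request-params', 'name=projects%2Fmy-project%2F...').
print(metadata)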
@@ -1203,11 +1272,13 @@ def sample_update_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("instance.name", request.instance.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1231,14 +1302,15 @@ def sample_update_instance(): # Done; return the response. return response - def delete_instance(self, - request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: + def delete_instance( + self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1314,8 +1386,7 @@ def sample_delete_instance(): flattened_params = [name] has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of the individual field arguments should be set.") # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1332,11 +1403,13 @@ def sample_delete_instance(): # Certain fields should be provided within the metadata header; # add these here. + # fmt: off metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ("name", request.name), )), ) + # fmt: on # Validate the universe domain. self._validate_universe_domain() @@ -1410,10 +1483,7 @@ def list_operations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1421,7 +1491,11 @@ def list_operations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1466,10 +1540,7 @@ def get_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1477,7 +1548,11 @@ def get_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1526,16 +1601,18 @@ def delete_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def cancel_operation( self, @@ -1577,16 +1654,18 @@ def cancel_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() # Send the request. - rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) def wait_operation( self, @@ -1631,10 +1710,7 @@ def wait_operation( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1642,7 +1718,11 @@ def wait_operation( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1687,10 +1767,7 @@ def get_location( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1698,7 +1775,11 @@ def get_location( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1743,10 +1824,7 @@ def list_locations( # Certain fields should be provided within the metadata header; # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("name", request.name),)), - ) + metadata = tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),) # Validate the universe domain. self._validate_universe_domain() @@ -1754,7 +1832,11 @@ def list_locations( try: # Send the request. response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -1768,6 +1850,4 @@ def list_locations( if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ -__all__ = ( - "CloudRedisClient", -) +__all__ = ("CloudRedisClient",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py index 4e0e4cb323..db7bea6d75 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -17,6 +17,7 @@ from google.api_core import retry as retries from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] @@ -44,14 +45,17 @@ class ListInstancesPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., cloud_redis.ListInstancesResponse], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., cloud_redis.ListInstancesResponse], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiate the pager. Args: @@ -92,7 +96,7 @@ def __iter__(self) -> Iterator[cloud_redis.Instance]: yield from page.instances def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) class ListInstancesAsyncPager: @@ -112,14 +116,17 @@ class ListInstancesAsyncPager: attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ - def __init__(self, - method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], - request: cloud_redis.ListInstancesRequest, - response: cloud_redis.ListInstancesResponse, - *, - retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + + def __init__( + self, + method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): """Instantiates the pager. 
Args: @@ -154,6 +161,7 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response + def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async def async_generator(): async for page in self.pages: @@ -163,4 +171,4 @@ async def async_generator(): return async_generator() def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 1cbbf54c25..5d26ad11b0 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -21,11 +21,13 @@ from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .rest import CloudRedisRestTransport from .rest import CloudRedisRestInterceptor + ASYNC_REST_CLASSES: Tuple[str, ...] try: from .rest_asyncio import AsyncCloudRedisRestTransport from .rest_asyncio import AsyncCloudRedisRestInterceptor - ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + + ASYNC_REST_CLASSES = ("AsyncCloudRedisRestTransport", "AsyncCloudRedisRestInterceptor") HAS_REST_ASYNC = True except ImportError: # pragma: NO COVER ASYNC_REST_CLASSES = () @@ -34,16 +36,16 @@ # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] -_transport_registry['grpc'] = CloudRedisGrpcTransport -_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport -_transport_registry['rest'] = CloudRedisRestTransport +_transport_registry["grpc"] = CloudRedisGrpcTransport +_transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport +_transport_registry["rest"] = CloudRedisRestTransport if HAS_REST_ASYNC: # pragma: NO COVER - _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport __all__ = ( - 'CloudRedisTransport', - 'CloudRedisGrpcTransport', - 'CloudRedisGrpcAsyncIOTransport', - 'CloudRedisRestTransport', - 'CloudRedisRestInterceptor', + "CloudRedisTransport", + "CloudRedisGrpcTransport", + "CloudRedisGrpcAsyncIOTransport", + "CloudRedisRestTransport", + "CloudRedisRestInterceptor", ) + ASYNC_REST_CLASSES diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 8e1decc081..00740f609d 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -25,12 +25,12 @@ from google.api_core import retry as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore +from google.oauth2 import service_account # type: ignore import google.protobuf -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -41,29 +41,32 @@ class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" + # fmt: off AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', + "https://www.googleapis.com/auth/cloud-platform", ) + # fmt: on - DEFAULT_HOST: str = 'redis.googleapis.com' + DEFAULT_HOST: str = "redis.googleapis.com" def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -99,10 +102,10 @@ def __init__( if credentials_file is not None: credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id, + ) elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. @@ -110,15 +113,19 @@ def __init__( credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' + if ":" not in host: + host += ":443" self._host = host @property @@ -188,12 +195,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - } + } def close(self): """Closes resources associated with the transport. - .. warning:: + .. warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! 
""" @@ -204,50 +211,75 @@ def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() + # fmt: off @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Union[ - cloud_redis.ListInstancesResponse, - Awaitable[cloud_redis.ListInstancesResponse] - ]]: + def list_instances( + self, + ) -> Callable[ + [cloud_redis.ListInstancesRequest], + Union[ + cloud_redis.ListInstancesResponse, + Awaitable[cloud_redis.ListInstancesResponse] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Union[ - cloud_redis.Instance, - Awaitable[cloud_redis.Instance] - ]]: + def get_instance( + self, + ) -> Callable[ + [cloud_redis.GetInstanceRequest], + Union[ + cloud_redis.Instance, + Awaitable[cloud_redis.Instance] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def create_instance( + self, + ) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def update_instance( + self, + ) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on + # fmt: off @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Union[ - operations_pb2.Operation, - Awaitable[operations_pb2.Operation] - ]]: + def delete_instance( + self, + ) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ], + ]: raise NotImplementedError() + # fmt: on @property def list_operations( @@ -261,53 +293,37 @@ def list_operations( @property def get_operation( self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.GetOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property def cancel_operation( self, - ) -> Callable[ - [operations_pb2.CancelOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: raise NotImplementedError() @property def delete_operation( self, - ) -> Callable[ - [operations_pb2.DeleteOperationRequest], - None, - ]: + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: raise NotImplementedError() @property def wait_operation( self, - ) -> Callable[ - [operations_pb2.WaitOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: + ) -> Callable[[operations_pb2.WaitOperationRequest], Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]]]: raise NotImplementedError() @property - def get_location(self, - ) -> Callable[ - [locations_pb2.GetLocationRequest], - Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], - ]: + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], Union[locations_pb2.Location, 
Awaitable[locations_pb2.Location]]]: raise NotImplementedError() @property - def list_locations(self, - ) -> Callable[ - [locations_pb2.ListLocationsRequest], - Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], - ]: + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]]]: raise NotImplementedError() @property @@ -315,6 +331,4 @@ def kind(self) -> str: raise NotImplementedError() -__all__ = ( - 'CloudRedisTransport', -) +__all__ = ("CloudRedisTransport",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index fe65a8baf8..76f9355e68 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -22,7 +22,7 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import gapic_v1 -import google.auth # type: ignore +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson @@ -31,13 +31,14 @@ import grpc # type: ignore import proto # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -57,10 +58,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -68,7 +71,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -94,7 +97,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): } _LOGGER.debug( f"Received response for {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": client_call_details.method, "response": grpc_response, @@ -136,28 +139,31 @@ class CloudRedisGrpcTransport(CloudRedisTransport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. 
""" + _stubs: Dict[str, Callable] - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -232,7 +238,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -241,7 +248,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -276,19 +284,21 @@ def __init__(self, *, ) self._interceptor = _LoggingClientInterceptor() - self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: """Create and return a gRPC channel object. Args: host (Optional[str]): The host for the channel to use. @@ -324,13 +334,12 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -342,17 +351,15 @@ def operations_client(self) -> operations_v1.OperationsClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsClient(self._logged_channel) # Return the client from cache. return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -376,18 +383,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -402,18 +409,18 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -441,18 +448,18 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -472,18 +479,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -499,13 +506,13 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] def close(self): self._logged_channel.close() @@ -514,8 +521,7 @@ def close(self): def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -532,8 +538,7 @@ def delete_operation( def cancel_operation( self, ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC. - """ + r"""Return a callable for the cancel_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -550,8 +555,7 @@ def cancel_operation( def wait_operation( self, ) -> Callable[[operations_pb2.WaitOperationRequest], None]: - r"""Return a callable for the wait_operation method over gRPC. - """ + r"""Return a callable for the wait_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -568,8 +572,7 @@ def wait_operation( def get_operation( self, ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC. - """ + r"""Return a callable for the get_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -586,8 +589,7 @@ def get_operation( def list_operations( self, ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: - r"""Return a callable for the list_operations method over gRPC. - """ + r"""Return a callable for the list_operations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -604,8 +606,7 @@ def list_operations( def list_locations( self, ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need @@ -622,8 +623,7 @@ def list_locations( def get_location( self, ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: - r"""Return a callable for the list locations method over gRPC. - """ + r"""Return a callable for the list locations method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
# gRPC handles serialization and deserialization, so we just need @@ -641,6 +641,4 @@ def kind(self) -> str: return "grpc" -__all__ = ( - 'CloudRedisGrpcTransport', -) +__all__ = ("CloudRedisGrpcTransport",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index a3393990f2..7b00aa511a 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -25,23 +25,24 @@ from google.api_core import exceptions as core_exceptions from google.api_core import retry_async as retries from google.api_core import operations_v1 -from google.auth import credentials as ga_credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message -import grpc # type: ignore -import proto # type: ignore +import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -61,10 +62,12 @@ async def intercept_unary_unary(self, continuation, client_call_details, request else: request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + # fmt: off request_metadata = { key: value.decode("utf-8") if isinstance(value, bytes) else value for key, value in request_metadata } + # fmt: on grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -72,7 +75,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Sending request for {client_call_details.method}", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "request": grpc_request, @@ -98,7 +101,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request } _LOGGER.debug( f"Received response to rpc {client_call_details.method}.", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": str(client_call_details.method), "response": grpc_response, @@ -145,13 +148,15 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): _stubs: Dict[str, Callable] = {} @classmethod - def create_channel(cls, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: + def create_channel( + cls, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: 
Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: host (Optional[str]): The host for the channel to use. @@ -182,29 +187,31 @@ def create_channel(cls, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, - **kwargs + **kwargs, ) - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -279,7 +286,8 @@ def __init__(self, *, if client_cert_source: cert, key = client_cert_source() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials @@ -288,7 +296,8 @@ def __init__(self, *, if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key + certificate_chain=cert, + private_key=key, ) # The base transport sets the host, credentials and scopes @@ -348,17 +357,15 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: """ # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: - self._operations_client = operations_v1.OperationsAsyncClient( - self._logged_channel - ) + self._operations_client = operations_v1.OperationsAsyncClient(self._logged_channel) # Return the client from cache. 
return self._operations_client @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - Awaitable[cloud_redis.ListInstancesResponse]]: + def list_instances( + self, + ) -> Callable[[cloud_redis.ListInstancesRequest], Awaitable[cloud_redis.ListInstancesResponse]]: r"""Return a callable for the list instances method over gRPC. Lists all Redis instances owned by a project in either the @@ -382,18 +389,18 @@ def list_instances(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/ListInstances', + if "list_instances" not in self._stubs: + self._stubs["list_instances"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/ListInstances", request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, ) - return self._stubs['list_instances'] + return self._stubs["list_instances"] @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - Awaitable[cloud_redis.Instance]]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], Awaitable[cloud_redis.Instance]]: r"""Return a callable for the get instance method over gRPC. Gets the details of a specific Redis instance. @@ -408,18 +415,18 @@ def get_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/GetInstance', + if "get_instance" not in self._stubs: + self._stubs["get_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/GetInstance", request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, ) - return self._stubs['get_instance'] + return self._stubs["get_instance"] @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create instance method over gRPC. Creates a Redis instance based on the specified tier and memory @@ -447,18 +454,18 @@ def create_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/CreateInstance', + if "create_instance" not in self._stubs: + self._stubs["create_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/CreateInstance", request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['create_instance'] + return self._stubs["create_instance"] @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the update instance method over gRPC. Updates the metadata and configuration of a specific @@ -478,18 +485,18 @@ def update_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + if "update_instance" not in self._stubs: + self._stubs["update_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/UpdateInstance", request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['update_instance'] + return self._stubs["update_instance"] @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - Awaitable[operations_pb2.Operation]]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete instance method over gRPC. Deletes a specific Redis instance. Instance stops @@ -505,16 +512,16 @@ def delete_instance(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self._logged_channel.unary_unary( - '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + if "delete_instance" not in self._stubs: + self._stubs["delete_instance"] = self._logged_channel.unary_unary( + "/google.cloud.redis.v1.CloudRedis/DeleteInstance", request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['delete_instance'] + return self._stubs["delete_instance"] def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -594,8 +601,7 @@ def kind(self) -> str: def delete_operation( self, ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: - r"""Return a callable for the delete_operation method over gRPC. - """ + r"""Return a callable for the delete_operation method over gRPC.""" # Generate a "stub function" on-the-fly which will actually make # the request. 
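# Illustrative sketch, not generated code: what _prep_wrapped_messages sets up
# for each RPC, expressed with google.api_core's public helper. The retry and
# timeout values are made-up examples, not the generated defaults; the async
# transport uses the retry_async equivalents via its own _wrap_method.
from google.api_core import gapic_v1
from google.api_core import retry as retries

def wrap_with_defaults(func):
    # Attach a default retry/timeout policy and client metadata to a raw stub.
    return gapic_v1.method.wrap_method(
        func,
        default_retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=1.3),
        default_timeout=600.0,
        client_info=gapic_v1.client_info.ClientInfo(),
    )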
         # gRPC handles serialization and deserialization, so we just need
@@ -612,8 +618,7 @@ def delete_operation(
     def cancel_operation(
         self,
     ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
-        r"""Return a callable for the cancel_operation method over gRPC.
-        """
+        r"""Return a callable for the cancel_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -630,8 +635,7 @@ def cancel_operation(
     def wait_operation(
         self,
     ) -> Callable[[operations_pb2.WaitOperationRequest], None]:
-        r"""Return a callable for the wait_operation method over gRPC.
-        """
+        r"""Return a callable for the wait_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -648,8 +652,7 @@ def wait_operation(
     def get_operation(
         self,
     ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
-        r"""Return a callable for the get_operation method over gRPC.
-        """
+        r"""Return a callable for the get_operation method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -666,8 +669,7 @@ def get_operation(
     def list_operations(
         self,
     ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]:
-        r"""Return a callable for the list_operations method over gRPC.
-        """
+        r"""Return a callable for the list_operations method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -684,8 +686,7 @@ def list_operations(
     def list_locations(
         self,
     ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
+        r"""Return a callable for the list locations method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
         # gRPC handles serialization and deserialization, so we just need
@@ -702,8 +703,7 @@ def list_locations(
     def get_location(
         self,
     ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
-        r"""Return a callable for the list locations method over gRPC.
-        """
+        r"""Return a callable for the get location method over gRPC."""
         # Generate a "stub function" on-the-fly which will actually make
         # the request.
# gRPC handles serialization and deserialization, so we just need @@ -717,6 +717,4 @@ def get_location( return self._stubs["get_location"] -__all__ = ( - 'CloudRedisGrpcAsyncIOTransport', -) +__all__ = ("CloudRedisGrpcAsyncIOTransport",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 7e43d25df4..ce55b9f00c 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -27,7 +27,7 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses @@ -49,6 +49,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -125,7 +126,12 @@ def post_update_instance(self, response): """ - def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -133,7 +139,10 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada """ return request, metadata - def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_create_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. 
Please use the `post_create_instance_with_metadata` @@ -146,7 +155,11 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -161,7 +174,11 @@ def post_create_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -169,7 +186,10 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada """ return request, metadata - def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_delete_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. Please use the `post_delete_instance_with_metadata` @@ -182,7 +202,11 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -197,7 +221,11 @@ def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, """ return response, metadata - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -205,7 +233,10 @@ def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se """ return request, metadata - def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + def post_get_instance( + self, + response: cloud_redis.Instance, + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. 
Please use the `post_get_instance_with_metadata` @@ -218,7 +249,11 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta """ return response - def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -233,7 +268,11 @@ def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metada """ return response, metadata - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -241,7 +280,10 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata """ return request, metadata - def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + def post_list_instances( + self, + response: cloud_redis.ListInstancesResponse, + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -254,7 +296,11 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl """ return response - def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -269,7 +315,11 @@ def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesR """ return response, metadata - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -277,7 +327,10 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada """ return request, metadata - def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + def post_update_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -290,7 +343,11 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations """ return response - def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -306,7 +363,9 @@ def post_update_instance_with_metadata(self, response: operations_pb2.Operation, return response, metadata def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location @@ -316,7 +375,8 @@ def pre_get_location( return request, metadata def post_get_location( - self, response: locations_pb2.Location + self, + response: locations_pb2.Location, ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -327,7 +387,9 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations @@ -337,7 +399,8 @@ def pre_list_locations( return request, metadata def post_list_locations( - self, response: locations_pb2.ListLocationsResponse + self, + response: locations_pb2.ListLocationsResponse, ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -348,7 +411,9 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation @@ -358,7 +423,8 @@ def pre_cancel_operation( return request, metadata def post_cancel_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for cancel_operation @@ -369,7 +435,9 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation @@ -379,7 +447,8 @@ def pre_delete_operation( return request, metadata def post_delete_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for delete_operation @@ -390,7 +459,9 @@ def post_delete_operation( return response def pre_get_operation( - self, 
request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation @@ -400,7 +471,8 @@ def pre_get_operation( return request, metadata def post_get_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -411,7 +483,9 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations @@ -421,7 +495,8 @@ def pre_list_operations( return request, metadata def post_list_operations( - self, response: operations_pb2.ListOperationsResponse + self, + response: operations_pb2.ListOperationsResponse, ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -432,7 +507,9 @@ def post_list_operations( return response def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation @@ -442,7 +519,8 @@ def pre_wait_operation( return request, metadata def post_wait_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for wait_operation @@ -492,29 +570,30 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[CloudRedisRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! 
+ NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -553,10 +632,9 @@ def __init__(self, *, client_info=client_info, always_use_jwt_access=always_use_jwt_access, url_scheme=url_scheme, - api_audience=api_audience + api_audience=api_audience, ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -573,46 +651,47 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -631,27 +710,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" 
response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. Args: @@ -685,21 +766,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -708,7 +789,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
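# Illustrative sketch, not generated code: the "Sending request ..." and
# "Received response ..." records above are ordinary stdlib logging calls, so
# once client_logging imported successfully, enabling DEBUG on the library
# logger surfaces them. The logger name here is an assumption for illustration.
import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger("google.cloud.redis_v1").setLevel(logging.DEBUG)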
@@ -728,13 +817,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -755,26 +844,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. Args: @@ -806,21 +897,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -829,7 +920,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
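# Illustrative sketch, not generated code: plugging a subclass of the
# CloudRedisRestInterceptor defined above into the REST transport. The header
# value is a hypothetical example, and application default credentials are
# assumed to be available when the transport is constructed.
class TracingInterceptor(CloudRedisRestInterceptor):
    def pre_get_instance(self, request, metadata):
        # Runs before the HTTP request is built; may rewrite either value.
        return request, list(metadata) + [("x-example-trace", "on")]

transport = CloudRedisRestTransport(interceptor=TracingInterceptor())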
@@ -849,13 +947,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -876,26 +974,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. Args: @@ -924,21 +1024,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -947,7 +1047,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
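# Illustrative sketch, not generated code: the dispatch at the heart of
# _get_response. The transcoded request names the HTTP verb and URI derived
# from the proto request, and getattr() picks the matching session method. A
# plain requests.Session stands in here for the AuthorizedSession used above.
import requests

session = requests.Session()
transcoded = {"method": "get", "uri": "/v1/projects/p/locations/l/instances/i"}
send = getattr(session, transcoded["method"])  # e.g. session.get
response = send("https://redis.googleapis.com" + transcoded["uri"], timeout=60)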
@@ -969,13 +1076,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -996,26 +1103,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1046,21 +1155,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1069,7 +1178,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
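# Illustrative sketch, not generated code: the *_with_metadata hooks supersede
# the deprecated bare post_* hooks and receive both the decoded response and
# the response metadata, either of which may be rewritten before returning.
class AuditInterceptor(CloudRedisRestInterceptor):
    def post_list_instances_with_metadata(self, response, metadata):
        # `response` is a typed cloud_redis.ListInstancesResponse here.
        print(f"page contained {len(response.instances)} instances")
        return response, metadata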
@@ -1091,13 +1207,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1118,27 +1234,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. Args: @@ -1172,21 +1290,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -1195,7 +1313,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
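# Illustrative sketch, not generated code: Create/Update/Delete return
# google.longrunning Operation messages, which the operations_client built
# above (with the per-RPC http_options) can poll. `transport` and `request`
# are assumed to exist, e.g. from the interceptor sketch earlier.
operation = transport.update_instance(request)  # operations_pb2.Operation
latest = transport.operations_client.get_operation(name=operation.name)
if latest.done:
    print("update finished")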
@@ -1215,13 +1341,13 @@ def __call__(self, except: response_payload = None http_response = { - "payload": response_payload, - "headers": dict(response.headers), - "status": response.status_code, + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -1230,49 +1356,59 @@ def __call__(self, ) return resp + # fmt: off @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self + ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + # fmt: on + # fmt: off @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self + ) -> Callable[[cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + # fmt: on @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): def __hash__(self): @@ -1286,27 +1422,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -1333,21 +1470,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -1356,7 +1493,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1374,12 +1518,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -1390,7 +1534,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): def __hash__(self): @@ -1404,27 +1548,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. Args: @@ -1451,21 +1596,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -1474,7 +1619,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
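The `_get_response` helpers above all share one shape: pull the HTTP verb and URI out of the transcoded request, then resolve the session method dynamically with `getattr`. A reduced, synchronous sketch of that dispatch pattern; the host and path are placeholders:

import requests

def get_response(host, session, transcoded_request, timeout=None):
    uri = transcoded_request["uri"]
    method = transcoded_request["method"]
    headers = {"Content-Type": "application/json"}
    # getattr(session, "get") is session.get, so a single helper covers
    # every HTTP verb the transcoder can produce.
    return getattr(session, method)(
        "{host}{uri}".format(host=host, uri=uri),
        timeout=timeout,
        headers=headers,
    )

response = get_response(
    "https://redis.googleapis.com",
    requests.Session(),
    {"method": "get", "uri": "/v1/projects/p/locations"},
)
print(response.status_code)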
@@ -1492,12 +1644,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -1508,7 +1660,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): def __hash__(self): @@ -1522,27 +1674,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1566,21 +1719,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -1589,7 +1742,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
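The payload capture in the logging blocks is deliberately best-effort: serialization must never break the RPC, which is why the generated code guards `MessageToJson` with a bare `except:`. A sketch of the same idea, using the narrower `except Exception:` that hand-written code would normally prefer:

from google.protobuf import json_format, struct_pb2

request = struct_pb2.Value(string_value="example")
try:
    request_payload = json_format.MessageToJson(request)
except Exception:
    # Logging is optional; fall back to None rather than raising.
    request_payload = None
print(request_payload)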
@@ -1600,7 +1760,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): def __hash__(self): @@ -1614,27 +1774,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. Args: @@ -1658,21 +1819,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -1681,7 +1842,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
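Each RPC property in these hunks returns a freshly constructed stub bound to the shared session, host, and interceptor; the trailing `# type: ignore` exists because mypy cannot see that the stub object is callable with the right signature. A self-contained sketch of the pattern, with illustrative names that are not the generated classes:

from typing import Callable


class _GetOperation:
    """Per-call stub: stores shared references, does the work in __call__."""

    def __init__(self, session: str) -> None:
        self._session = session

    def __call__(self, name: str) -> str:
        return f"GET {name} via {self._session}"


class Transport:
    def __init__(self, session: str) -> None:
        self._session = session

    @property
    def get_operation(self) -> Callable[[str], str]:
        # A fresh stub per access; cheap because it only stores references.
        return _GetOperation(self._session)


transport = Transport(session="shared-session")
print(transport.get_operation("operations/123"))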
@@ -1692,7 +1860,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): def __hash__(self): @@ -1706,27 +1874,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1753,21 +1922,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -1776,7 +1945,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
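The query parameters handed to the session come from `rest_helpers.flatten_query_params`, which flattens a possibly nested mapping into the dotted key/value pairs REST APIs expect. A small demonstration; the output shown in the comment is indicative of the shape rather than an exact guarantee:

from google.api_core import rest_helpers

params = rest_helpers.flatten_query_params(
    {"pageSize": 10, "filter": {"state": "READY"}},
    strict=True,
)
# Produces a flat sequence of (key, value) pairs, e.g.
# [('pageSize', '10'), ('filter.state', 'READY')]
print(params)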
@@ -1794,12 +1970,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -1810,7 +1986,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): def __hash__(self): @@ -1824,27 +2000,28 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1871,21 +2048,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, @@ -1894,7 +2071,14 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = CloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
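When a response comes back with a 4xx/5xx status, the transport raises the matching `GoogleAPICallError` subclass instead of surfacing a raw status code. The mapping lives in google-api-core; a quick illustration of it:

from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(404, "Instance not found")
print(type(exc).__name__)                                   # NotFound
print(isinstance(exc, core_exceptions.GoogleAPICallError))  # True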
@@ -1912,12 +2096,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -1928,7 +2112,7 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): def __hash__(self): @@ -1942,28 +2126,29 @@ def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. Args: @@ -1992,21 +2177,21 @@ def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -2015,7 +2200,15 @@ def __call__(self, ) # Send the request - response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = CloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
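`WaitOperation` is the one mixin RPC in this transport whose HTTP binding declares a request body (`body: '*'`), so its `_get_response` additionally forwards `data=body` while the other helpers omit it. A reduced sketch; the URL and payload are placeholders:

import json
import requests

body = json.dumps({"timeout": "30s"})
response = requests.Session().post(
    "https://redis.googleapis.com/v2/projects/p/locations/l/operations/o:wait",
    headers={"Content-Type": "application/json"},
    data=body,  # only body-carrying RPCs pass this
    timeout=10.0,
)
print(response.status_code)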
@@ -2033,12 +2226,12 @@ def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, @@ -2055,6 +2248,4 @@ def close(self): self._session.close() -__all__=( - 'CloudRedisRestTransport', -) +__all__ = ("CloudRedisRestTransport",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index fdb993eada..108cd1dec8 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -15,20 +15,23 @@ # import google.auth + try: - import aiohttp # type: ignore - from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore - from google.api_core import rest_streaming_async # type: ignore - from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore except ImportError as e: # pragma: NO COVER - raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + raise ImportError( + "`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`" + ) from e from google.auth.aio import credentials as ga_credentials_async # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.api_core import retry_async as retries from google.api_core import rest_helpers from google.api_core import rest_streaming_async # type: ignore @@ -36,7 +39,7 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore import json # type: ignore import dataclasses @@ -56,6 +59,7 @@ try: from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False @@ -137,7 +141,12 @@ async def post_update_instance(self, response): """ - async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + + async def pre_create_instance( + self, + request: cloud_redis.CreateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -145,7 +154,10 @@ async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, """ return request, metadata - async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_create_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance DEPRECATED. 
Please use the `post_create_instance_with_metadata` @@ -158,7 +170,11 @@ async def post_create_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_create_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for create_instance Override in a subclass to read or manipulate the response or metadata after it @@ -173,7 +189,11 @@ async def post_create_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_delete_instance( + self, + request: cloud_redis.DeleteInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -181,7 +201,10 @@ async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, """ return request, metadata - async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_delete_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance DEPRECATED. 
Please use the `post_delete_instance_with_metadata` @@ -194,7 +217,11 @@ async def post_delete_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_delete_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for delete_instance Override in a subclass to read or manipulate the response or metadata after it @@ -209,7 +236,11 @@ async def post_delete_instance_with_metadata(self, response: operations_pb2.Oper """ return response, metadata - async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_get_instance( + self, + request: cloud_redis.GetInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -217,7 +248,10 @@ async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metada """ return request, metadata - async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + async def post_get_instance( + self, + response: cloud_redis.Instance, + ) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance DEPRECATED. 
Please use the `post_get_instance_with_metadata` @@ -230,7 +264,11 @@ async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis """ return response - async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_get_instance_with_metadata( + self, + response: cloud_redis.Instance, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for get_instance Override in a subclass to read or manipulate the response or metadata after it @@ -245,7 +283,11 @@ async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, """ return response, metadata - async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_list_instances( + self, + request: cloud_redis.ListInstancesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -253,7 +295,10 @@ async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, me """ return request, metadata - async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + async def post_list_instances( + self, + response: cloud_redis.ListInstancesResponse, + ) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances DEPRECATED. 
Please use the `post_list_instances_with_metadata` @@ -266,7 +311,11 @@ async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) """ return response - async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_list_instances_with_metadata( + self, + response: cloud_redis.ListInstancesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for list_instances Override in a subclass to read or manipulate the response or metadata after it @@ -281,7 +330,11 @@ async def post_list_instances_with_metadata(self, response: cloud_redis.ListInst """ return response, metadata - async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + async def pre_update_instance( + self, + request: cloud_redis.UpdateInstanceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -289,7 +342,10 @@ async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, """ return request, metadata - async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + async def post_update_instance( + self, + response: operations_pb2.Operation, + ) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance DEPRECATED. 
Please use the `post_update_instance_with_metadata` @@ -302,7 +358,11 @@ async def post_update_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + async def post_update_instance_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: """Post-rpc interceptor for update_instance Override in a subclass to read or manipulate the response or metadata after it @@ -318,7 +378,9 @@ async def post_update_instance_with_metadata(self, response: operations_pb2.Oper return response, metadata async def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location @@ -328,7 +390,8 @@ async def pre_get_location( return request, metadata async def post_get_location( - self, response: locations_pb2.Location + self, + response: locations_pb2.Location, ) -> locations_pb2.Location: """Post-rpc interceptor for get_location @@ -339,7 +402,9 @@ async def post_get_location( return response async def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations @@ -349,7 +414,8 @@ async def pre_list_locations( return request, metadata async def post_list_locations( - self, response: locations_pb2.ListLocationsResponse + self, + response: locations_pb2.ListLocationsResponse, ) -> locations_pb2.ListLocationsResponse: """Post-rpc interceptor for list_locations @@ -360,7 +426,9 @@ async def post_list_locations( return response async def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation @@ -370,7 +438,8 @@ async def pre_cancel_operation( return request, metadata async def post_cancel_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for cancel_operation @@ -381,7 +450,9 @@ async def post_cancel_operation( return response async def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation @@ -391,7 +462,8 @@ async def pre_delete_operation( return request, metadata async def post_delete_operation( - self, response: None + self, + response: None, ) -> None: """Post-rpc interceptor for 
delete_operation @@ -402,7 +474,9 @@ async def post_delete_operation( return response async def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation @@ -412,7 +486,8 @@ async def pre_get_operation( return request, metadata async def post_get_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for get_operation @@ -423,7 +498,9 @@ async def post_get_operation( return response async def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations @@ -433,7 +510,8 @@ async def pre_list_operations( return request, metadata async def post_list_operations( - self, response: operations_pb2.ListOperationsResponse + self, + response: operations_pb2.ListOperationsResponse, ) -> operations_pb2.ListOperationsResponse: """Post-rpc interceptor for list_operations @@ -444,7 +522,9 @@ async def post_list_operations( return response async def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + self, + request: operations_pb2.WaitOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation @@ -454,7 +534,8 @@ async def pre_wait_operation( return request, metadata async def post_wait_operation( - self, response: operations_pb2.Operation + self, + response: operations_pb2.Operation, ) -> operations_pb2.Operation: """Post-rpc interceptor for wait_operation @@ -471,6 +552,7 @@ class AsyncCloudRedisRestStub: _host: str _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. @@ -502,23 +584,25 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, - *, - host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials_async.Credentials] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - url_scheme: str = 'https', - interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, - ) -> None: + + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = "https", + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, + ) -> None: """Instantiate the transport. - NOTE: This async REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! + NOTE: This async REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via a GitHub issue in + this library's repository. Thank you! Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -540,7 +624,7 @@ def __init__(self, client_info=client_info, always_use_jwt_access=False, url_scheme=url_scheme, - api_audience=None + api_audience=None, ) self._session = AsyncAuthorizedSession(self._credentials) # type: ignore self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() @@ -549,7 +633,7 @@ def __init__(self, self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { self.list_instances: self._wrap_method( self.list_instances, @@ -630,27 +714,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.CreateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
Args: @@ -684,21 +770,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "httpRequest": http_request, @@ -707,15 +793,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -733,12 +827,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", "metadata": http_response["headers"], @@ -760,26 +854,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: 
cloud_redis.DeleteInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. Args: @@ -811,21 +907,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "httpRequest": http_request, @@ -834,15 +930,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
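            # With the aiohttp-based session the error body arrives as bytes,
            # so it is awaited, decoded as UTF-8, and parsed as JSON before
            # being handed to the error formatter.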
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -860,12 +963,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", "metadata": http_response["headers"], @@ -887,26 +990,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.Instance: + async def __call__( + self, + request: cloud_redis.GetInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. 
Args: @@ -935,21 +1040,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "httpRequest": http_request, @@ -958,15 +1063,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -984,12 +1096,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", "metadata": http_response["headers"], @@ -1011,26 +1123,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> cloud_redis.ListInstancesResponse: + async def __call__( + self, + request: cloud_redis.ListInstancesRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. Args: @@ -1061,21 +1175,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = type(request).to_json(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "httpRequest": http_request, @@ -1084,15 +1198,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListInstances._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1110,12 +1231,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", "metadata": http_response["headers"], @@ -1137,27 +1258,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: cloud_redis.UpdateInstanceRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: + async def __call__( + self, + request: cloud_redis.UpdateInstanceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. Args: @@ -1191,21 +1314,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "httpRequest": http_request, @@ -1214,15 +1337,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # Return the response @@ -1240,12 +1371,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), - "status": "OK", # need to obtain this properly + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", "metadata": http_response["headers"], @@ -1265,45 +1396,45 @@ def operations_client(self) -> AsyncOperationsRestClient: # Only create a new client if we do not already have one. 
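        # The bindings below mirror google/longrunning/operations.proto, so
        # the shared AsyncOperationsRestClient can poll long-running
        # operations over REST without service-specific configuration.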
if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { - 'google.longrunning.Operations.CancelOperation': [ + "google.longrunning.Operations.CancelOperation": [ { - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", }, ], - 'google.longrunning.Operations.DeleteOperation': [ + "google.longrunning.Operations.DeleteOperation": [ { - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.GetOperation': [ + "google.longrunning.Operations.GetOperation": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", }, ], - 'google.longrunning.Operations.ListOperations': [ + "google.longrunning.Operations.ListOperations": [ { - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", }, ], - 'google.longrunning.Operations.WaitOperation': [ + "google.longrunning.Operations.WaitOperation": [ { - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", }, ], } rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore - host=self._host, - # use the credentials which are saved - credentials=self._credentials, # type: ignore - http_options=http_options, - path_prefix="v1" + host=self._host, + # use the credentials which are saved + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="v1", ) self._operations_client = AsyncOperationsRestClient(transport=rest_transport) @@ -1312,38 +1443,38 @@ def operations_client(self) -> AsyncOperationsRestClient: return self._operations_client @property - def create_instance(self) -> Callable[ - [cloud_redis.CreateInstanceRequest], - operations_pb2.Operation]: + def create_instance( + self, + ) -> Callable[[cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore @property - def delete_instance(self) -> Callable[ - [cloud_redis.DeleteInstanceRequest], - operations_pb2.Operation]: + def delete_instance( + self, + ) -> Callable[[cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore @property - def get_instance(self) -> Callable[ - [cloud_redis.GetInstanceRequest], - cloud_redis.Instance]: + def get_instance( + self, + ) -> Callable[[cloud_redis.GetInstanceRequest], cloud_redis.Instance]: return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore @property - def list_instances(self) -> Callable[ - [cloud_redis.ListInstancesRequest], - cloud_redis.ListInstancesResponse]: + def list_instances( + self, + ) -> Callable[[cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore @property - def update_instance(self) -> Callable[ - [cloud_redis.UpdateInstanceRequest], - operations_pb2.Operation]: + def update_instance( + self, + ) -> Callable[[cloud_redis.UpdateInstanceRequest], 
operations_pb2.Operation]: return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1357,27 +1488,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.GetLocationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.Location: - + async def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: r"""Call the get location method over HTTP. Args: @@ -1404,21 +1536,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpRequest": http_request, @@ -1427,15 +1559,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
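# [Editor's note, illustrative] The status check below converts any non-2xx
# REST response into a typed google.api_core exception. A hedged sketch of the
# older, simpler equivalent (format_http_response_error is assumed here to be
# the newer helper that additionally attaches the method, URL, and payload):
#     if response.status_code >= 400:
#         raise core_exceptions.from_http_status(
#             response.status_code, "request failed", response=response)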
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -1449,12 +1588,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", "httpResponse": http_response, @@ -1465,7 +1604,7 @@ async def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): def __hash__(self): @@ -1479,27 +1618,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: locations_pb2.ListLocationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> locations_pb2.ListLocationsResponse: - + async def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. 
Args: @@ -1526,21 +1666,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpRequest": http_request, @@ -1549,15 +1689,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -1571,12 +1718,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", "httpResponse": http_response, @@ -1587,7 +1734,7 @@ async def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1601,27 +1748,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - 
async def __call__(self, - request: operations_pb2.CancelOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the cancel operation method over HTTP. Args: @@ -1645,21 +1793,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", "httpRequest": http_request, @@ -1668,22 +1816,29 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
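# [Editor's note, code-quality aside; out of scope for this formatting-only
# patch] The `try: ... except: request_payload = None` blocks above use a bare
# `except:`, which also traps KeyboardInterrupt and SystemExit. The idiomatic
# form would be:
#     try:
#         request_payload = json_format.MessageToJson(request)
#     except Exception:
#         request_payload = None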
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore return await self._interceptor.post_cancel_operation(None) @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1697,27 +1852,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.DeleteOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> None: - + async def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: r"""Call the delete operation method over HTTP. 
Args: @@ -1741,21 +1897,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", "httpRequest": http_request, @@ -1764,22 +1920,29 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore return await self._interceptor.post_delete_operation(None) @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1793,27 +1956,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.GetOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
operations_pb2.Operation: r"""Call the get operation method over HTTP. Args: @@ -1840,21 +2004,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpRequest": http_request, @@ -1863,15 +2027,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -1885,12 +2056,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", "httpResponse": http_response, @@ -1901,7 +2072,7 @@ async def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): def __hash__(self): @@ -1915,27 +2086,28 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, 
params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + ) return response - async def __call__(self, - request: operations_pb2.ListOperationsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.ListOperationsResponse: - + async def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. Args: @@ -1962,21 +2134,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpRequest": http_request, @@ -1985,15 +2157,22 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + response = await AsyncCloudRedisRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
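# [Editor's note, illustrative] The `extra = {` -> `extra={` and
# `timeout: Optional[float]=None` -> `timeout: Optional[float] = None` changes
# throughout these hunks apply PEP 8 as enforced by black: keyword arguments
# without an annotation take no spaces around `=`, while annotated parameters
# with defaults do.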
if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -2007,12 +2186,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", "httpResponse": http_response, @@ -2023,7 +2202,7 @@ async def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2037,28 +2216,29 @@ async def _get_response( session, timeout, transcoded_request, - body=None): - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] headers = dict(metadata) - headers['Content-Type'] = 'application/json' + headers["Content-Type"] = "application/json" response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), data=body, - ) + ) return response - async def __call__(self, - request: operations_pb2.WaitOperationRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), - ) -> operations_pb2.Operation: - + async def __call__( + self, + request: operations_pb2.WaitOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. 
Args: @@ -2087,21 +2267,21 @@ async def __call__(self, query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] try: request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { - "payload": request_payload, - "requestMethod": method, - "requestUrl": request_url, - "headers": dict(metadata), + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpRequest": http_request, @@ -2110,15 +2290,23 @@ async def __call__(self, ) # Send the request - response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: content = await response.read() - payload = json.loads(content.decode('utf-8')) - request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) - method = transcoded_request['method'] + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request["uri"]) + method = transcoded_request["method"] raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore content = await response.read() @@ -2132,12 +2320,12 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": dict(response.headers), + "headers": dict(response.headers), "status": response.status_code, } _LOGGER.debug( "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", - extra = { + extra={ "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", "httpResponse": http_response, diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 85b3522cb2..9242115905 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -18,7 +18,7 @@ from google.api_core import gapic_v1 from google.protobuf import json_format -from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO import re @@ -42,18 +42,20 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, - host: str = 
'redis.googleapis.com', - credentials: Optional[Any] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - api_audience: Optional[str] = None, - ) -> None: + def __init__( + self, + *, + host: str = "redis.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: """Instantiate the transport. Args: host (Optional[str]): - The hostname to connect to (default: 'redis.googleapis.com'). + The hostname to connect to (default: "redis.googleapis.com"). credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -84,15 +86,18 @@ def __init__(self, *, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience + api_audience=api_audience, ) class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId": "", + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -100,11 +105,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + "body": "instance", + }, ] return http_options @@ -119,16 +125,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) return query_params @@ -137,8 +146,10 @@ class _BaseDeleteInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -146,10 +157,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -161,10 +173,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) return query_params @@ -173,8 +187,10 @@ class _BaseGetInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -182,10 +198,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}", + }, ] return http_options @@ -197,10 +214,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) return query_params @@ -209,8 +228,10 @@ class _BaseListInstances: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -218,10 +239,11 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/instances", + }, ] return http_options @@ -233,10 +255,12 @@ def _get_transcoded_request(http_options, request): @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params)) return query_params @@ -245,8 +269,11 @@ class _BaseUpdateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + # fmt: off + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + # fmt: on @classmethod def _get_unset_required_fields(cls, message_dict): @@ -254,11 +281,12 @@ def _get_unset_required_fields(cls, message_dict): @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 
'instance', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{instance.name=projects/*/locations/*/instances/*}", + "body": "instance", + }, ] return http_options @@ -273,16 +301,19 @@ def _get_request_body_json(transcoded_request): # Jsonify the request body body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False + transcoded_request["body"], + use_integers_for_enums=False, ) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) return query_params @@ -293,10 +324,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -304,12 +336,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListLocations: @@ -318,10 +352,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -329,12 +364,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseCancelOperation: @@ -343,10 +380,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + }, ] return http_options @@ -354,12 +392,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseDeleteOperation: @@ -368,10 +408,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -379,12 +420,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseGetOperation: @@ -393,10 +436,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -404,12 +448,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseListOperations: @@ -418,10 +464,11 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options @@ -429,12 +476,14 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params class _BaseWaitOperation: @@ -443,11 +492,12 @@ def __hash__(self): # pragma: NO COVER @staticmethod def _get_http_options(): - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', - 'body': '*', - }, + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v2/{name=projects/*/locations/*/operations/*}:wait", + "body": "*", + }, ] return http_options @@ -455,19 +505,20 @@ def _get_http_options(): def _get_transcoded_request(http_options, request): request_kwargs = json_format.MessageToDict(request) 
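# [Editor's note, descriptive] path_template.transcode matches request_kwargs
# against the http_options rules above and returns a dict carrying the
# resolved "uri", the HTTP "method", an optional "body" subtree, and the
# remaining fields under "query_params".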
transcoded_request = path_template.transcode( - http_options, **request_kwargs) + http_options, + **request_kwargs, + ) return transcoded_request @staticmethod def _get_request_body_json(transcoded_request): - body = json.dumps(transcoded_request['body']) + body = json.dumps(transcoded_request["body"]) return body + @staticmethod def _get_query_params_json(transcoded_request): - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = json.loads(json.dumps(transcoded_request["query_params"])) return query_params -__all__=( - '_BaseCloudRedisRestTransport', -) +__all__ = ("_BaseCloudRedisRestTransport",) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py index 1e420395cc..3828d7a37c 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py @@ -31,18 +31,18 @@ ) __all__ = ( - 'CreateInstanceRequest', - 'DeleteInstanceRequest', - 'GetInstanceRequest', - 'Instance', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'MaintenancePolicy', - 'MaintenanceSchedule', - 'NodeInfo', - 'OperationMetadata', - 'PersistenceConfig', - 'TlsCertificate', - 'UpdateInstanceRequest', - 'WeeklyMaintenanceWindow', + "CreateInstanceRequest", + "DeleteInstanceRequest", + "GetInstanceRequest", + "Instance", + "ListInstancesRequest", + "ListInstancesResponse", + "MaintenancePolicy", + "MaintenanceSchedule", + "NodeInfo", + "OperationMetadata", + "PersistenceConfig", + "TlsCertificate", + "UpdateInstanceRequest", + "WeeklyMaintenanceWindow", ) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py index 016aa43a64..2b7aeeaa0c 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py @@ -26,25 +26,27 @@ from google.type import timeofday_pb2 # type: ignore +# fmt: off __protobuf__ = proto.module( - package='google.cloud.redis.v1', + package="google.cloud.redis.v1", manifest={ - 'NodeInfo', - 'Instance', - 'PersistenceConfig', - 'MaintenancePolicy', - 'WeeklyMaintenanceWindow', - 'MaintenanceSchedule', - 'ListInstancesRequest', - 'ListInstancesResponse', - 'GetInstanceRequest', - 'CreateInstanceRequest', - 'UpdateInstanceRequest', - 'DeleteInstanceRequest', - 'OperationMetadata', - 'TlsCertificate', + "NodeInfo", + "Instance", + "PersistenceConfig", + "MaintenancePolicy", + "WeeklyMaintenanceWindow", + "MaintenanceSchedule", + "ListInstancesRequest", + "ListInstancesResponse", + "GetInstanceRequest", + "CreateInstanceRequest", + "UpdateInstanceRequest", + "DeleteInstanceRequest", + "OperationMetadata", + "TlsCertificate", }, ) +# fmt: on class NodeInfo(proto.Message): @@ -253,6 +255,7 @@ class Instance(proto.Message): Optional. The available maintenance versions that an instance could update to. """ + class State(proto.Enum): r"""Represents the different states of a Redis instance. 
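# [Editor's note, illustrative; not part of the patch] The `# fmt: off` /
# `# fmt: on` pairs introduced around `__protobuf__ = proto.module(...)` and
# the `__REQUIRED_FIELDS_DEFAULT_VALUES` dicts tell black to leave the
# enclosed lines exactly as the generator emitted them on any later run.
# A minimal sketch of the guard (KEEP_AS_IS is a hypothetical name):

# fmt: off
KEEP_AS_IS = {"a":  1,  "b":  2}  # black will not re-space this line
# fmt: on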
@@ -468,34 +471,34 @@ class SuspensionReason(proto.Enum): proto.BOOL, number=23, ) - server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + server_ca_certs: MutableSequence["TlsCertificate"] = proto.RepeatedField( proto.MESSAGE, number=25, - message='TlsCertificate', + message="TlsCertificate", ) transit_encryption_mode: TransitEncryptionMode = proto.Field( proto.ENUM, number=26, enum=TransitEncryptionMode, ) - maintenance_policy: 'MaintenancePolicy' = proto.Field( + maintenance_policy: "MaintenancePolicy" = proto.Field( proto.MESSAGE, number=27, - message='MaintenancePolicy', + message="MaintenancePolicy", ) - maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + maintenance_schedule: "MaintenanceSchedule" = proto.Field( proto.MESSAGE, number=28, - message='MaintenanceSchedule', + message="MaintenanceSchedule", ) replica_count: int = proto.Field( proto.INT32, number=31, ) - nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + nodes: MutableSequence["NodeInfo"] = proto.RepeatedField( proto.MESSAGE, number=32, - message='NodeInfo', + message="NodeInfo", ) read_endpoint: str = proto.Field( proto.STRING, @@ -514,10 +517,10 @@ class SuspensionReason(proto.Enum): proto.STRING, number=36, ) - persistence_config: 'PersistenceConfig' = proto.Field( + persistence_config: "PersistenceConfig" = proto.Field( proto.MESSAGE, number=37, - message='PersistenceConfig', + message="PersistenceConfig", ) suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( proto.ENUM, @@ -559,6 +562,7 @@ class PersistenceConfig(proto.Message): future snapshots will be aligned. If not provided, the current time will be used. """ + class PersistenceMode(proto.Enum): r"""Available Persistence modes. @@ -652,10 +656,10 @@ class MaintenancePolicy(proto.Message): proto.STRING, number=3, ) - weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( + weekly_maintenance_window: MutableSequence["WeeklyMaintenanceWindow"] = proto.RepeatedField( proto.MESSAGE, number=4, - message='WeeklyMaintenanceWindow', + message="WeeklyMaintenanceWindow", ) @@ -801,10 +805,10 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances: MutableSequence['Instance'] = proto.RepeatedField( + instances: MutableSequence["Instance"] = proto.RepeatedField( proto.MESSAGE, number=1, - message='Instance', + message="Instance", ) next_page_token: str = proto.Field( proto.STRING, @@ -863,10 +867,10 @@ class CreateInstanceRequest(proto.Message): proto.STRING, number=2, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=3, - message='Instance', + message="Instance", ) @@ -896,10 +900,10 @@ class UpdateInstanceRequest(proto.Message): number=1, message=field_mask_pb2.FieldMask, ) - instance: 'Instance' = proto.Field( + instance: "Instance" = proto.Field( proto.MESSAGE, number=2, - message='Instance', + message="Instance", ) diff --git a/tests/integration/goldens/redis_selective/noxfile.py b/tests/integration/goldens/redis_selective/noxfile.py index c6510a4be3..270887c38c 100755 --- a/tests/integration/goldens/redis_selective/noxfile.py +++ b/tests/integration/goldens/redis_selective/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -FORMAT_PATHS = ["google", "tests"] +FORMAT_PATHS = ["tests"] LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # We're most interested in ensuring that code is formatted properly diff --git 
a/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index 44403c6627..31c20d64f1 100755 --- a/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -993,8 +993,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -1006,8 +1006,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] def test_list_instances_non_empty_request_with_auto_populated_field(): @@ -1022,8 +1022,8 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1035,8 +1035,8 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest( - parent='parent_value', - page_token='page_token_value', + parent="parent_value", + page_token="page_token_value", ) def test_list_instances_use_cached_wrapped_rpc(): @@ -1122,8 +1122,8 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) response = await client.list_instances(request) @@ -1135,8 +1135,8 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -1152,7 +1152,7 @@ def test_list_instances_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1184,7 +1184,7 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. 
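# [Editor's note, interpretive] The noxfile change above, dropping "google"
# from FORMAT_PATHS while leaving LINT_PATHS untouched, is the mechanical core
# of this commit's subject: nox's format session will no longer rewrite the
# generated google/ directory, while that directory presumably remains subject
# to the lint session's checks.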
request = cloud_redis.ListInstancesRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1220,7 +1220,7 @@ def test_list_instances_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1228,7 +1228,7 @@ def test_list_instances_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @@ -1242,7 +1242,7 @@ def test_list_instances_flattened_error(): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) @pytest.mark.asyncio @@ -1262,7 +1262,7 @@ async def test_list_instances_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_instances( - parent='parent_value', + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1270,7 +1270,7 @@ async def test_list_instances_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio @@ -1284,7 +1284,7 @@ async def test_list_instances_flattened_error_async(): with pytest.raises(ValueError): await client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -1504,33 +1504,33 @@ def test_get_instance(request_type, transport: str = 'grpc'): '__call__') as call: # Designate an appropriate return value for the call. 
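# [Editor's note, illustrative] The pervasive 'parent_value' -> "parent_value"
# churn in these tests is black's default string normalization (single quotes
# rewritten to double quotes). Projects wanting to avoid such churn can run
# black with the -S / --skip-string-normalization flag (or
# black.Mode(string_normalization=False)); this patch instead adopts the
# double-quoted style wholesale.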
call.return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', + customer_managed_key="customer_managed_key_value", suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) response = client.get_instance(request) @@ -1542,33 +1542,33 @@ def test_get_instance(request_type, transport: str = 'grpc'): # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' + assert response.customer_managed_key == "customer_managed_key_value" assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == ["available_maintenance_versions_value"] def test_get_instance_non_empty_request_with_auto_populated_field(): @@ -1583,7 +1583,7 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1595,7 +1595,7 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest( - name='name_value', + name="name_value", ) def test_get_instance_use_cached_wrapped_rpc(): @@ -1681,33 +1681,33 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', + customer_managed_key="customer_managed_key_value", suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], )) response = await client.get_instance(request) @@ -1719,33 +1719,33 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' + assert response.customer_managed_key == "customer_managed_key_value" assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == ["available_maintenance_versions_value"] @pytest.mark.asyncio @@ -1761,7 +1761,7 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1793,7 +1793,7 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1829,7 +1829,7 @@ def test_get_instance_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1837,7 +1837,7 @@ def test_get_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -1851,7 +1851,7 @@ def test_get_instance_flattened_error(): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -1871,7 +1871,7 @@ async def test_get_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1879,7 +1879,7 @@ async def test_get_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -1893,7 +1893,7 @@ async def test_get_instance_flattened_error_async(): with pytest.raises(ValueError): await client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -1941,8 +1941,8 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1954,8 +1954,8 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest( - parent='parent_value', - instance_id='instance_id_value', + parent="parent_value", + instance_id="instance_id_value", ) def test_create_instance_use_cached_wrapped_rpc(): @@ -2078,7 +2078,7 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2110,7 +2110,7 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent_value' + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2146,9 +2146,9 @@ def test_create_instance_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2156,13 +2156,13 @@ def test_create_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2176,9 +2176,9 @@ def test_create_instance_flattened_error(): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) @pytest.mark.asyncio @@ -2200,9 +2200,9 @@ async def test_create_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_instance( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2210,13 +2210,13 @@ async def test_create_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent - mock_val = 'parent_value' + mock_val = "parent_value" assert arg == mock_val arg = args[0].instance_id - mock_val = 'instance_id_value' + mock_val = "instance_id_value" assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2230,9 +2230,9 @@ async def test_create_instance_flattened_error_async(): with pytest.raises(ValueError): await client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) @@ -2413,7 +2413,7 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2445,7 +2445,7 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'name_value' + request.instance.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2481,8 +2481,8 @@ def test_update_instance_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2490,10 +2490,10 @@ def test_update_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @@ -2507,8 +2507,8 @@ def test_update_instance_flattened_error(): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) @pytest.mark.asyncio @@ -2530,8 +2530,8 @@ async def test_update_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_instance( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -2539,10 +2539,10 @@ async def test_update_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val arg = args[0].instance - mock_val = cloud_redis.Instance(name='name_value') + mock_val = cloud_redis.Instance(name="name_value") assert arg == mock_val @pytest.mark.asyncio @@ -2556,8 +2556,8 @@ async def test_update_instance_flattened_error_async(): with pytest.raises(ValueError): await client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) @@ -2605,7 +2605,7 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. request = cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2617,7 +2617,7 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest( - name='name_value', + name="name_value", ) def test_delete_instance_use_cached_wrapped_rpc(): @@ -2740,7 +2740,7 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2772,7 +2772,7 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name_value' + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2808,7 +2808,7 @@ def test_delete_instance_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2816,7 +2816,7 @@ def test_delete_instance_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @@ -2830,7 +2830,7 @@ def test_delete_instance_flattened_error(): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @pytest.mark.asyncio @@ -2852,7 +2852,7 @@ async def test_delete_instance_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_instance( - name='name_value', + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2860,7 +2860,7 @@ async def test_delete_instance_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name - mock_val = 'name_value' + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio @@ -2874,7 +2874,7 @@ async def test_delete_instance_flattened_error_async(): with pytest.raises(ValueError): await client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -2931,7 +2931,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = "parent_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -2940,7 +2940,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3008,7 +3008,7 @@ def test_list_instances_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', + parent="parent_value", ) mock_args.update(sample_request) @@ -3042,7 +3042,7 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.list_instances( cloud_redis.ListInstancesRequest(), - parent='parent_value', + parent="parent_value", ) @@ -3161,14 +3161,14 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3236,7 +3236,7 @@ def test_get_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3270,7 +3270,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.get_instance( cloud_redis.GetInstanceRequest(), - name='name_value', + name="name_value", ) @@ -3335,8 +3335,8 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns assert "instanceId" in jsonified_request assert jsonified_request["instanceId"] == request_init["instance_id"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["instanceId"] = 'instance_id_value' + jsonified_request["parent"] = "parent_value" + jsonified_request["instanceId"] = "instance_id_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
@@ -3345,9 +3345,9 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns # verify required fields with non-default values are left alone assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert jsonified_request["parent"] == "parent_value" assert "instanceId" in jsonified_request - assert jsonified_request["instanceId"] == 'instance_id_value' + assert jsonified_request["instanceId"] == "instance_id_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3417,9 +3417,9 @@ def test_create_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -3451,9 +3451,9 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.create_instance( cloud_redis.CreateInstanceRequest(), - parent='parent_value', - instance_id='instance_id_value', - instance=cloud_redis.Instance(name='name_value'), + parent="parent_value", + instance_id="instance_id_value", + instance=cloud_redis.Instance(name="name_value"), ) @@ -3584,8 +3584,8 @@ def test_update_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) mock_args.update(sample_request) @@ -3617,8 +3617,8 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.update_instance( cloud_redis.UpdateInstanceRequest(), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - instance=cloud_redis.Instance(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + instance=cloud_redis.Instance(name="name_value"), ) @@ -3679,14 +3679,14 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["name"] = "name_value" unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert jsonified_request["name"] == "name_value" client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3751,7 +3751,7 @@ def test_delete_instance_rest_flattened(): # get truthy value for each flattened field mock_args = dict( - name='name_value', + name="name_value", ) mock_args.update(sample_request) @@ -3783,7 +3783,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): with pytest.raises(ValueError): client.delete_instance( cloud_redis.DeleteInstanceRequest(), - name='name_value', + name="name_value", ) @@ -4034,8 +4034,8 @@ async def test_list_instances_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], )) await client.list_instances(request=None) @@ -4062,33 +4062,33 @@ async def test_get_instance_empty_call_grpc_asyncio(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', + customer_managed_key="customer_managed_key_value", suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], )) await client.get_instance(request=None) @@ -4228,8 +4228,8 @@ def test_list_instances_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -4246,8 +4246,8 @@ def test_list_instances_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -4336,33 +4336,33 @@ def test_get_instance_rest_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', + customer_managed_key="customer_managed_key_value", suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -4379,33 +4379,33 @@ def test_get_instance_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' + assert response.customer_managed_key == "customer_managed_key_value" assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == ["available_maintenance_versions_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -5443,8 +5443,8 @@ async def test_list_instances_rest_asyncio_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -5461,8 +5461,8 @@ async def test_list_instances_rest_asyncio_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio @@ -5558,33 +5558,33 @@ async def test_get_instance_rest_asyncio_call_success(request_type): with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', + name="name_value", + display_name="display_name_value", + location_id="location_id_value", + alternative_location_id="alternative_location_id_value", + redis_version="redis_version_value", + reserved_ip_range="reserved_ip_range_value", + secondary_ip_range="secondary_ip_range_value", + host="host_value", port=453, - current_location_id='current_location_id_value', + current_location_id="current_location_id_value", state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', + status_message="status_message_value", tier=cloud_redis.Instance.Tier.BASIC, memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', + authorized_network="authorized_network_value", + persistence_iam_identity="persistence_iam_identity_value", connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, auth_enabled=True, transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, replica_count=1384, - read_endpoint='read_endpoint_value', + read_endpoint="read_endpoint_value", read_endpoint_port=1920, read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', + customer_managed_key="customer_managed_key_value", suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], + maintenance_version="maintenance_version_value", + available_maintenance_versions=["available_maintenance_versions_value"], ) # Wrap the value into a proper Response obj @@ -5601,33 +5601,33 @@ async def test_get_instance_rest_asyncio_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.location_id == "location_id_value" + assert response.alternative_location_id == "alternative_location_id_value" + assert response.redis_version == "redis_version_value" + assert response.reserved_ip_range == "reserved_ip_range_value" + assert response.secondary_ip_range == "secondary_ip_range_value" + assert response.host == "host_value" assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' + assert response.current_location_id == "current_location_id_value" assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' + assert response.status_message == "status_message_value" assert response.tier == cloud_redis.Instance.Tier.BASIC assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.authorized_network == "authorized_network_value" + assert response.persistence_iam_identity == "persistence_iam_identity_value" assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING assert response.auth_enabled is True assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint == "read_endpoint_value" assert response.read_endpoint_port == 1920 assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' + assert response.customer_managed_key == "customer_managed_key_value" assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + assert response.maintenance_version == "maintenance_version_value" + assert response.available_maintenance_versions == ["available_maintenance_versions_value"] @pytest.mark.asyncio diff --git a/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index d217b7b36e..687a2d003c 100644 --- a/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -44,7 +44,7 @@ async def sample_method_bidi_streaming(): ) # This method expects an iterator which contains - # 'mollusca_v1.SignatureRequestOneRequiredField' objects + # "mollusca_v1.SignatureRequestOneRequiredField" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] diff --git a/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index 5fe826cddf..93c4b5fe90 100644 --- a/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -44,7 +44,7 @@ def sample_method_bidi_streaming(): ) # This method expects an iterator which contains - # 'mollusca_v1.SignatureRequestOneRequiredField' objects + # "mollusca_v1.SignatureRequestOneRequiredField" objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/tests/unit/schema/test_metadata.py b/tests/unit/schema/test_metadata.py index 0d1d1c8032..b05b51ee42 100644 --- a/tests/unit/schema/test_metadata.py +++ b/tests/unit/schema/test_metadata.py @@ -132,7 +132,7 @@ def test_address_rel(): addr.rel( metadata.Address(package=("foo", "bar"), module="baz"), ) - == "'Bacon'" + == '"Bacon"' ) @@ -165,7 +165,7 @@ def test_address_rel_later(): name="Ham", package=("foo", "bar"), ) - assert addr.rel(other) == "'Bacon'" + assert addr.rel(other) == '"Bacon"' def test_address_rel_nested_sibling(): @@ -175,7 +175,7 @@ def test_address_rel_nested_sibling(): other = metadata.Address( module="baz", name="Ham", package=("foo", "bar"), parent=("Spam",) ) - assert addr.rel(other) == "'Spam.Bacon'" + assert addr.rel(other) == '"Spam.Bacon"' def test_address_rel_nested_sibling_later(): @@ -193,7 +193,7 @@ def test_address_rel_nested_sibling_later(): package=("foo", "bar"), parent=("Spam",), ) - assert addr.rel(other) == "'Spam.Bacon'" + assert addr.rel(other) == '"Spam.Bacon"' def test_address_rel_nested_parent(): diff --git a/tests/unit/schema/wrappers/test_field.py b/tests/unit/schema/wrappers/test_field.py index dd8367c2ef..7491f50cb0 100644 --- a/tests/unit/schema/wrappers/test_field.py +++ b/tests/unit/schema/wrappers/test_field.py @@ -273,7 +273,7 @@ def test_mock_value_original_type_bool(): def test_mock_value_str(): field = make_field(name="foo_bar", type="TYPE_STRING") - assert field.mock_value == "'foo_bar_value'" + assert field.mock_value == '"foo_bar_value"' def test_mock_value_original_type_str(): @@ -293,7 +293,7 @@ def test_mock_value_original_type_bytes(): def test_mock_value_repeated(): field = make_field(name="foo_bar", type="TYPE_STRING", label=3) - assert field.mock_value == "['foo_bar_value']" + assert field.mock_value == '["foo_bar_value"]' def test_mock_value_original_type_repeated(): @@ -318,7 +318,7 @@ def test_mock_value_map(): type="TYPE_MESSAGE", ) - assert field.mock_value == "{'key_value': 'value_value'}" + assert field.mock_value == '{"key_value": "value_value"}' def test_mock_value_enum(): diff --git a/tests/unit/schema/wrappers/test_method.py b/tests/unit/schema/wrappers/test_method.py index 088958e06b..a5be8a0dd9 100644 --- a/tests/unit/schema/wrappers/test_method.py +++ b/tests/unit/schema/wrappers/test_method.py @@ -466,7 +466,7 @@ def test_body_fields(): method = make_method("PutSquid", input_message=input_message, http_rule=http_rule) assert set(method.body_fields) == {"mantle"} mock_value = method.body_fields["mantle"].mock_value - assert mock_value == "baz.Mantle(mantle_stuff='mantle_stuff_value')" + assert mock_value == 'baz.Mantle(mantle_stuff="mantle_stuff_value")' def test_body_fields_no_body():
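
Note on the hunks above: every change in this patch is the same mechanical substitution — string literals in the generated goldens, and in the generator's expected mock values, move from single quotes to double quotes. A minimal sketch of the quoting convention the updated assertions encode, using a hypothetical render_mock_value helper that is not part of the generator's API:

    # Hypothetical sketch, not the generator's implementation: string mock
    # values are rendered with double quotes, as the updated goldens assert.
    def render_mock_value(name: str, repeated: bool = False) -> str:
        value = f'"{name}_value"'  # e.g. "foo_bar_value" for a field named foo_bar
        return f"[{value}]" if repeated else value

    # Mirrors the expectations in tests/unit/schema/wrappers/test_field.py.
    assert render_mock_value("foo_bar") == '"foo_bar_value"'
    assert render_mock_value("foo_bar", repeated=True) == '["foo_bar_value"]'

The same convention extends to nested message mock values, e.g. baz.Mantle(mantle_stuff="mantle_stuff_value") in tests/unit/schema/wrappers/test_method.py above.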